Commit a91f192 (verified) by davidanugraha, 1 parent: 99707b0

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: meta-llama/Llama-3.2-3B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: helpsteer3_llama32_3b_dpo_nemotron_qwen3
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # helpsteer3_llama32_3b_dpo_nemotron_qwen3
+
+ This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_nemotron_qwen3 dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-06
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: ADAMW_TORCH with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.6.0
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
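
A minimal usage sketch for the fine-tuned checkpoint. The repo id below is an assumption (committer name plus model name), not confirmed by this commit; substitute the actual Hub id. The sampling parameters mirror the defaults shipped in generation_config.json further down.

```python
# Hedged sketch: load and query the model. The repo id is assumed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_qwen3"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

messages = [{"role": "user", "content": "Explain DPO fine-tuning in one sentence."}]
inputs = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt").to(model.device)
# temperature/top_p follow the repo's generation_config.json
outputs = model.generate(inputs, max_new_tokens=128, do_sample=True, temperature=0.6, top_p=0.9)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```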
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 161507922542592.0,
+   "train_loss": 0.48762158089620206,
+   "train_runtime": 14310.7821,
+   "train_samples_per_second": 6.56,
+   "train_steps_per_second": 0.103
+ }
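
The reported throughput figures are rounded but mutually consistent; a quick back-of-the-envelope check (values copied from this file and the README above):

```python
# Consistency check on the training stats in all_results.json.
runtime = 14310.7821          # train_runtime, seconds
samples_per_s = 6.56          # train_samples_per_second
steps_per_s = 0.103           # train_steps_per_second
total_batch = 64              # total_train_batch_size from the README

print(samples_per_s * runtime)    # ~93,879 samples seen in the single epoch
print(steps_per_s * runtime)      # ~1,474 optimizer steps
print(steps_per_s * total_batch)  # ~6.59 samples/s, matching 6.56 up to rounding
```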
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+     {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+     {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+     {%- if strftime_now is defined %}
+         {%- set date_string = strftime_now("%d %b %Y") %}
+     {%- else %}
+         {%- set date_string = "26 Jul 2024" %}
+     {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+     {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+     {%- set system_message = messages[0]['content']|trim %}
+     {%- set messages = messages[1:] %}
+ {%- else %}
+     {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+     {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+     {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+     {#- Extract the first user message so we can plug it in here #}
+     {%- if messages | length != 0 %}
+         {%- set first_user_message = messages[0]['content']|trim %}
+         {%- set messages = messages[1:] %}
+     {%- else %}
+         {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+     {%- endif %}
+     {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+     {{- "Given the following functions, please respond with a JSON for a function call " }}
+     {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+     {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+     {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+         {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+     {%- elif 'tool_calls' in message %}
+         {%- if not message.tool_calls|length == 1 %}
+             {{- raise_exception("This model only supports single tool-calls at once!") }}
+         {%- endif %}
+         {%- set tool_call = message.tool_calls[0].function %}
+         {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+         {{- '{"name": "' + tool_call.name + '", ' }}
+         {{- '"parameters": ' }}
+         {{- tool_call.arguments | tojson }}
+         {{- "}" }}
+         {{- "<|eot_id|>" }}
+     {%- elif message.role == "tool" or message.role == "ipython" %}
+         {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+         {%- if message.content is mapping or message.content is iterable %}
+             {{- message.content | tojson }}
+         {%- else %}
+             {{- message.content }}
+         {%- endif %}
+         {{- "<|eot_id|>" }}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
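
To see what this template renders, apply it through the tokenizer. A sketch (repo id assumed as above); the commented output traces the system-message and generation-prompt branches of the template:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_qwen3")  # assumed id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|begin_of_text|><|start_header_id|>system<|end_header_id|>
#
# Cutting Knowledge Date: December 2023
# Today Date: <current date via strftime_now>
#
# You are a helpful assistant.<|eot_id|><|start_header_id|>user<|end_header_id|>
#
# Hello!<|eot_id|><|start_header_id|>assistant<|end_header_id|>
```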
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.52.4",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
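
These hyperparameters pin down the parameter count, which can be cross-checked against the shard index below; a sketch of the arithmetic:

```python
# Derive the parameter count from config.json. With tie_word_embeddings the
# embedding/lm_head tensor is stored once, and at 2 bytes/param (bfloat16) the
# total matches "total_size" in pytorch_model.bin.index.json exactly.
h, inter, layers = 3072, 8192, 28          # hidden_size, intermediate_size, num_hidden_layers
heads, kv_heads, head_dim = 24, 8, 128     # attention geometry
vocab = 128256

attn = h * heads * head_dim + 2 * h * kv_heads * head_dim + heads * head_dim * h  # q, k+v, o
mlp = 3 * h * inter        # gate, up, down projections
norms = 2 * h              # input + post-attention RMSNorm weights
embed = vocab * h          # shared between embed_tokens and lm_head

params = embed + layers * (attn + mlp + norms) + h  # + final model.norm
print(params)              # 3212749824  (~3.2B)
print(params * 2)          # 6425499648 bytes, the index metadata's total_size
```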
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.52.4"
+ }
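
Equivalently, as a GenerationConfig object (the three eos ids are <|end_of_text|>, <|eom_id|>, and <|eot_id|> per the tokenizer config below):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    bos_token_id=128000,
    eos_token_id=[128001, 128008, 128009],  # <|end_of_text|>, <|eom_id|>, <|eot_id|>
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
)
# model.generate(..., generation_config=gen_cfg) reproduces these repo defaults.
```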
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c433b7e13ca1002bb332810706c23f4057e6e89212f0c728e000a981114a2e75
+ size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f151e5a40220dd4497bfb35e89911a9045fdf4e104015b8322974ff9a512574f
+ size 1459745184
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+   "metadata": {
+     "total_size": 6425499648
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+   }
+ }
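
This index is what lets transformers locate the shard that holds a given tensor; a small sketch of how it is read:

```python
# Look up which shard stores a parameter, using the index file above.
import json

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])           # 6425499648
print(index["weight_map"]["model.norm.weight"])  # pytorch_model-00002-of-00002.bin

# Layers 0-19 sit almost entirely in shard 1, layers 21-27 in shard 2, and
# layer 20 is split across the two shards at the save boundary.
```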
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|eot_id|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|eot_id|>"
+ }
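
Note that pad_token reuses <|eot_id|>, so pad and eos coincide; a sketch of the batching consequence (repo id assumed as above):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_qwen3")  # assumed id
assert tok.pad_token == tok.eos_token == "<|eot_id|>"

# With pad == eos, batched generation should left-pad and pass attention masks,
# otherwise padding is indistinguishable from end-of-turn tokens.
tok.padding_side = "left"
batch = tok(["short", "a slightly longer prompt"], padding=True, return_tensors="pt")
```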
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
+ {
+   "added_tokens_decoder": {
+     "128000": {
+       "content": "<|begin_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128001": {
+       "content": "<|end_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128002": {
+       "content": "<|reserved_special_token_0|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128003": {
+       "content": "<|reserved_special_token_1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128004": {
+       "content": "<|finetune_right_pad_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128005": {
+       "content": "<|reserved_special_token_2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128006": {
+       "content": "<|start_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128007": {
+       "content": "<|end_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128008": {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128009": {
+       "content": "<|eot_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128010": {
+       "content": "<|python_tag|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128011": {
+       "content": "<|reserved_special_token_3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128012": {
+       "content": "<|reserved_special_token_4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128013": {
+       "content": "<|reserved_special_token_5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128014": {
+       "content": "<|reserved_special_token_6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128015": {
+       "content": "<|reserved_special_token_7|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128016": {
+       "content": "<|reserved_special_token_8|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128017": {
+       "content": "<|reserved_special_token_9|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128018": {
+       "content": "<|reserved_special_token_10|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128019": {
+       "content": "<|reserved_special_token_11|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128020": {
+       "content": "<|reserved_special_token_12|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128021": {
+       "content": "<|reserved_special_token_13|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128022": {
+       "content": "<|reserved_special_token_14|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128023": {
+       "content": "<|reserved_special_token_15|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128024": {
+       "content": "<|reserved_special_token_16|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128025": {
+       "content": "<|reserved_special_token_17|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128026": {
+       "content": "<|reserved_special_token_18|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128027": {
+       "content": "<|reserved_special_token_19|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128028": {
+       "content": "<|reserved_special_token_20|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128029": {
+       "content": "<|reserved_special_token_21|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128030": {
+       "content": "<|reserved_special_token_22|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128031": {
+       "content": "<|reserved_special_token_23|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128032": {
+       "content": "<|reserved_special_token_24|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128033": {
+       "content": "<|reserved_special_token_25|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128034": {
+       "content": "<|reserved_special_token_26|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128035": {
+       "content": "<|reserved_special_token_27|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128036": {
+       "content": "<|reserved_special_token_28|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128037": {
+       "content": "<|reserved_special_token_29|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128038": {
+       "content": "<|reserved_special_token_30|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128039": {
+       "content": "<|reserved_special_token_31|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128040": {
+       "content": "<|reserved_special_token_32|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128041": {
+       "content": "<|reserved_special_token_33|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128042": {
+       "content": "<|reserved_special_token_34|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128043": {
+       "content": "<|reserved_special_token_35|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128044": {
+       "content": "<|reserved_special_token_36|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128045": {
+       "content": "<|reserved_special_token_37|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128046": {
+       "content": "<|reserved_special_token_38|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128047": {
+       "content": "<|reserved_special_token_39|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128048": {
+       "content": "<|reserved_special_token_40|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128049": {
+       "content": "<|reserved_special_token_41|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128050": {
+       "content": "<|reserved_special_token_42|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128051": {
+       "content": "<|reserved_special_token_43|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128052": {
+       "content": "<|reserved_special_token_44|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128053": {
+       "content": "<|reserved_special_token_45|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128054": {
+       "content": "<|reserved_special_token_46|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128055": {
+       "content": "<|reserved_special_token_47|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128056": {
+       "content": "<|reserved_special_token_48|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128057": {
+       "content": "<|reserved_special_token_49|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128058": {
+       "content": "<|reserved_special_token_50|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128059": {
+       "content": "<|reserved_special_token_51|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128060": {
+       "content": "<|reserved_special_token_52|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128061": {
+       "content": "<|reserved_special_token_53|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128062": {
+       "content": "<|reserved_special_token_54|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128063": {
+       "content": "<|reserved_special_token_55|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128064": {
+       "content": "<|reserved_special_token_56|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128065": {
+       "content": "<|reserved_special_token_57|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128066": {
+       "content": "<|reserved_special_token_58|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128067": {
+       "content": "<|reserved_special_token_59|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128068": {
+       "content": "<|reserved_special_token_60|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128069": {
+       "content": "<|reserved_special_token_61|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128070": {
+       "content": "<|reserved_special_token_62|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128071": {
+       "content": "<|reserved_special_token_63|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128072": {
+       "content": "<|reserved_special_token_64|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128073": {
+       "content": "<|reserved_special_token_65|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128074": {
+       "content": "<|reserved_special_token_66|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128075": {
+       "content": "<|reserved_special_token_67|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128076": {
+       "content": "<|reserved_special_token_68|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128077": {
+       "content": "<|reserved_special_token_69|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128078": {
+       "content": "<|reserved_special_token_70|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128079": {
+       "content": "<|reserved_special_token_71|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128080": {
+       "content": "<|reserved_special_token_72|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128081": {
+       "content": "<|reserved_special_token_73|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128082": {
+       "content": "<|reserved_special_token_74|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128083": {
+       "content": "<|reserved_special_token_75|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128084": {
+       "content": "<|reserved_special_token_76|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128085": {
+       "content": "<|reserved_special_token_77|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128086": {
+       "content": "<|reserved_special_token_78|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128087": {
+       "content": "<|reserved_special_token_79|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128088": {
+       "content": "<|reserved_special_token_80|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128089": {
+       "content": "<|reserved_special_token_81|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128090": {
+       "content": "<|reserved_special_token_82|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128091": {
+       "content": "<|reserved_special_token_83|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128092": {
+       "content": "<|reserved_special_token_84|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128093": {
+       "content": "<|reserved_special_token_85|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128094": {
+       "content": "<|reserved_special_token_86|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128095": {
+       "content": "<|reserved_special_token_87|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128096": {
+       "content": "<|reserved_special_token_88|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128097": {
+       "content": "<|reserved_special_token_89|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128098": {
+       "content": "<|reserved_special_token_90|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128099": {
+       "content": "<|reserved_special_token_91|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128100": {
+       "content": "<|reserved_special_token_92|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128101": {
+       "content": "<|reserved_special_token_93|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128102": {
+       "content": "<|reserved_special_token_94|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128103": {
+       "content": "<|reserved_special_token_95|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128104": {
+       "content": "<|reserved_special_token_96|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128105": {
+       "content": "<|reserved_special_token_97|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128106": {
+       "content": "<|reserved_special_token_98|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128107": {
+       "content": "<|reserved_special_token_99|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128108": {
+       "content": "<|reserved_special_token_100|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128109": {
+       "content": "<|reserved_special_token_101|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128110": {
+       "content": "<|reserved_special_token_102|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128111": {
+       "content": "<|reserved_special_token_103|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128112": {
+       "content": "<|reserved_special_token_104|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128113": {
+       "content": "<|reserved_special_token_105|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128114": {
+       "content": "<|reserved_special_token_106|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128115": {
+       "content": "<|reserved_special_token_107|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128116": {
+       "content": "<|reserved_special_token_108|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128117": {
+       "content": "<|reserved_special_token_109|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128118": {
+       "content": "<|reserved_special_token_110|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128119": {
+       "content": "<|reserved_special_token_111|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128120": {
+       "content": "<|reserved_special_token_112|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128121": {
+       "content": "<|reserved_special_token_113|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128122": {
+       "content": "<|reserved_special_token_114|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128123": {
+       "content": "<|reserved_special_token_115|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128124": {
+       "content": "<|reserved_special_token_116|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
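The tokenizer_config.json added above registers all of Llama 3.2's reserved special tokens (up through ID 128255) as non-normalized, non-splitting specials, and pins the chat-relevant defaults: `<|begin_of_text|>` as BOS, `<|eot_id|>` as both EOS and padding token, right-side padding, and a 131072-token context window. A minimal sketch of how these settings surface at load time, assuming the uploaded files sit in a local checkpoint directory (the path below is a placeholder, not part of the repo):

```python
from transformers import AutoTokenizer  # Transformers >= 4.52.4 per the model card

# Placeholder path; point this at wherever the uploaded files live.
tok = AutoTokenizer.from_pretrained("./helpsteer3_llama32_3b_dpo_nemotron_qwen3")

# The config reuses <|eot_id|> as the pad token, so pad_token_id == eos_token_id.
print(tok.bos_token, tok.eos_token, tok.pad_token)  # <|begin_of_text|> <|eot_id|> <|eot_id|>
print(tok.model_max_length)                         # 131072
print(tok.padding_side)                             # right
```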
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 161507922542592.0,
4
+ "train_loss": 0.48762158089620206,
5
+ "train_runtime": 14310.7821,
6
+ "train_samples_per_second": 6.56,
7
+ "train_steps_per_second": 0.103
8
+ }
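These summary metrics are internally consistent with the hyperparameters in the model card: 1,467 optimizer steps at an effective batch size of 64 (1 per device × 4 GPUs × 16 gradient-accumulation steps) works out to the reported throughput. A quick check, with the filename assumed to match the upload above:

```python
import json

with open("train_results.json") as f:
    results = json.load(f)

total_steps = 1467     # from trainer_log.jsonl below
effective_batch = 64   # 1 per device x 4 GPUs x 16 grad-accumulation steps

samples_seen = total_steps * effective_batch    # 93,888 samples in one epoch
print(samples_seen / results["train_runtime"])  # ~6.56  (train_samples_per_second)
print(total_steps / results["train_runtime"])   # ~0.103 (train_steps_per_second)
```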
trainer_log.jsonl ADDED
@@ -0,0 +1,147 @@
1
+ {"current_steps": 10, "total_steps": 1467, "loss": 0.6922, "accuracy": 0.4390625059604645, "lr": 6.122448979591837e-08, "epoch": 0.006817503941369466, "percentage": 0.68, "elapsed_time": "0:01:42", "remaining_time": "4:09:10"}
2
+ {"current_steps": 20, "total_steps": 1467, "loss": 0.6933, "accuracy": 0.5015624761581421, "lr": 1.2925170068027211e-07, "epoch": 0.013635007882738932, "percentage": 1.36, "elapsed_time": "0:03:22", "remaining_time": "4:04:03"}
3
+ {"current_steps": 30, "total_steps": 1467, "loss": 0.6929, "accuracy": 0.5093750357627869, "lr": 1.9727891156462583e-07, "epoch": 0.0204525118241084, "percentage": 2.04, "elapsed_time": "0:05:02", "remaining_time": "4:01:52"}
4
+ {"current_steps": 40, "total_steps": 1467, "loss": 0.6925, "accuracy": 0.520312488079071, "lr": 2.653061224489796e-07, "epoch": 0.027270015765477863, "percentage": 2.73, "elapsed_time": "0:06:41", "remaining_time": "3:58:46"}
5
+ {"current_steps": 50, "total_steps": 1467, "loss": 0.6921, "accuracy": 0.520312488079071, "lr": 3.333333333333333e-07, "epoch": 0.03408751970684733, "percentage": 3.41, "elapsed_time": "0:08:18", "remaining_time": "3:55:22"}
6
+ {"current_steps": 60, "total_steps": 1467, "loss": 0.6919, "accuracy": 0.5218749642372131, "lr": 4.0136054421768705e-07, "epoch": 0.0409050236482168, "percentage": 4.09, "elapsed_time": "0:09:58", "remaining_time": "3:53:59"}
7
+ {"current_steps": 70, "total_steps": 1467, "loss": 0.6919, "accuracy": 0.5453125238418579, "lr": 4.693877551020408e-07, "epoch": 0.04772252758958626, "percentage": 4.77, "elapsed_time": "0:11:36", "remaining_time": "3:51:48"}
8
+ {"current_steps": 80, "total_steps": 1467, "loss": 0.6897, "accuracy": 0.5453125238418579, "lr": 5.374149659863945e-07, "epoch": 0.05454003153095573, "percentage": 5.45, "elapsed_time": "0:13:14", "remaining_time": "3:49:41"}
9
+ {"current_steps": 90, "total_steps": 1467, "loss": 0.6894, "accuracy": 0.5656249523162842, "lr": 6.054421768707482e-07, "epoch": 0.0613575354723252, "percentage": 6.13, "elapsed_time": "0:14:51", "remaining_time": "3:47:27"}
10
+ {"current_steps": 100, "total_steps": 1467, "loss": 0.6858, "accuracy": 0.620312511920929, "lr": 6.734693877551019e-07, "epoch": 0.06817503941369465, "percentage": 6.82, "elapsed_time": "0:16:30", "remaining_time": "3:45:35"}
11
+ {"current_steps": 110, "total_steps": 1467, "loss": 0.6826, "accuracy": 0.598437488079071, "lr": 7.414965986394558e-07, "epoch": 0.07499254335506413, "percentage": 7.5, "elapsed_time": "0:18:09", "remaining_time": "3:43:55"}
12
+ {"current_steps": 120, "total_steps": 1467, "loss": 0.6818, "accuracy": 0.6015625596046448, "lr": 8.095238095238095e-07, "epoch": 0.0818100472964336, "percentage": 8.18, "elapsed_time": "0:19:46", "remaining_time": "3:41:57"}
13
+ {"current_steps": 130, "total_steps": 1467, "loss": 0.6724, "accuracy": 0.6031250357627869, "lr": 8.775510204081632e-07, "epoch": 0.08862755123780305, "percentage": 8.86, "elapsed_time": "0:21:24", "remaining_time": "3:40:15"}
14
+ {"current_steps": 140, "total_steps": 1467, "loss": 0.6753, "accuracy": 0.6421875357627869, "lr": 9.45578231292517e-07, "epoch": 0.09544505517917252, "percentage": 9.54, "elapsed_time": "0:23:03", "remaining_time": "3:38:33"}
15
+ {"current_steps": 150, "total_steps": 1467, "loss": 0.6693, "accuracy": 0.6109374761581421, "lr": 9.984848484848486e-07, "epoch": 0.102262559120542, "percentage": 10.22, "elapsed_time": "0:24:41", "remaining_time": "3:36:49"}
16
+ {"current_steps": 160, "total_steps": 1467, "loss": 0.6553, "accuracy": 0.6578125357627869, "lr": 9.909090909090909e-07, "epoch": 0.10908006306191145, "percentage": 10.91, "elapsed_time": "0:26:20", "remaining_time": "3:35:11"}
17
+ {"current_steps": 170, "total_steps": 1467, "loss": 0.6546, "accuracy": 0.6500000357627869, "lr": 9.833333333333332e-07, "epoch": 0.11589756700328092, "percentage": 11.59, "elapsed_time": "0:27:59", "remaining_time": "3:33:35"}
18
+ {"current_steps": 180, "total_steps": 1467, "loss": 0.6469, "accuracy": 0.6687500476837158, "lr": 9.757575757575757e-07, "epoch": 0.1227150709446504, "percentage": 12.27, "elapsed_time": "0:29:37", "remaining_time": "3:31:52"}
19
+ {"current_steps": 190, "total_steps": 1467, "loss": 0.6389, "accuracy": 0.6812500357627869, "lr": 9.681818181818182e-07, "epoch": 0.12953257488601985, "percentage": 12.95, "elapsed_time": "0:31:16", "remaining_time": "3:30:11"}
20
+ {"current_steps": 200, "total_steps": 1467, "loss": 0.6291, "accuracy": 0.6609375476837158, "lr": 9.606060606060605e-07, "epoch": 0.1363500788273893, "percentage": 13.63, "elapsed_time": "0:32:54", "remaining_time": "3:28:27"}
21
+ {"current_steps": 210, "total_steps": 1467, "loss": 0.6342, "accuracy": 0.6749999523162842, "lr": 9.53030303030303e-07, "epoch": 0.1431675827687588, "percentage": 14.31, "elapsed_time": "0:34:33", "remaining_time": "3:26:51"}
22
+ {"current_steps": 220, "total_steps": 1467, "loss": 0.6291, "accuracy": 0.6609375476837158, "lr": 9.454545454545454e-07, "epoch": 0.14998508671012825, "percentage": 15.0, "elapsed_time": "0:36:12", "remaining_time": "3:25:13"}
23
+ {"current_steps": 230, "total_steps": 1467, "loss": 0.6212, "accuracy": 0.690625011920929, "lr": 9.378787878787879e-07, "epoch": 0.1568025906514977, "percentage": 15.68, "elapsed_time": "0:37:52", "remaining_time": "3:23:40"}
24
+ {"current_steps": 240, "total_steps": 1467, "loss": 0.6085, "accuracy": 0.6796875, "lr": 9.303030303030303e-07, "epoch": 0.1636200945928672, "percentage": 16.36, "elapsed_time": "0:39:30", "remaining_time": "3:22:01"}
25
+ {"current_steps": 250, "total_steps": 1467, "loss": 0.6233, "accuracy": 0.667187511920929, "lr": 9.227272727272727e-07, "epoch": 0.17043759853423665, "percentage": 17.04, "elapsed_time": "0:41:07", "remaining_time": "3:20:11"}
26
+ {"current_steps": 260, "total_steps": 1467, "loss": 0.6071, "accuracy": 0.6890625357627869, "lr": 9.151515151515152e-07, "epoch": 0.1772551024756061, "percentage": 17.72, "elapsed_time": "0:42:45", "remaining_time": "3:18:29"}
27
+ {"current_steps": 270, "total_steps": 1467, "loss": 0.6017, "accuracy": 0.6906250715255737, "lr": 9.075757575757576e-07, "epoch": 0.1840726064169756, "percentage": 18.4, "elapsed_time": "0:44:23", "remaining_time": "3:16:47"}
28
+ {"current_steps": 280, "total_steps": 1467, "loss": 0.6076, "accuracy": 0.6781250238418579, "lr": 9e-07, "epoch": 0.19089011035834505, "percentage": 19.09, "elapsed_time": "0:45:59", "remaining_time": "3:15:00"}
29
+ {"current_steps": 290, "total_steps": 1467, "loss": 0.5958, "accuracy": 0.6859375238418579, "lr": 8.924242424242425e-07, "epoch": 0.1977076142997145, "percentage": 19.77, "elapsed_time": "0:47:37", "remaining_time": "3:13:15"}
30
+ {"current_steps": 300, "total_steps": 1467, "loss": 0.569, "accuracy": 0.7328125238418579, "lr": 8.848484848484849e-07, "epoch": 0.204525118241084, "percentage": 20.45, "elapsed_time": "0:49:14", "remaining_time": "3:11:31"}
31
+ {"current_steps": 310, "total_steps": 1467, "loss": 0.5687, "accuracy": 0.7140624523162842, "lr": 8.772727272727273e-07, "epoch": 0.21134262218245345, "percentage": 21.13, "elapsed_time": "0:50:51", "remaining_time": "3:09:50"}
32
+ {"current_steps": 320, "total_steps": 1467, "loss": 0.5635, "accuracy": 0.7421875, "lr": 8.696969696969697e-07, "epoch": 0.2181601261238229, "percentage": 21.81, "elapsed_time": "0:52:31", "remaining_time": "3:08:17"}
33
+ {"current_steps": 330, "total_steps": 1467, "loss": 0.5596, "accuracy": 0.7343750596046448, "lr": 8.62121212121212e-07, "epoch": 0.2249776300651924, "percentage": 22.49, "elapsed_time": "0:54:10", "remaining_time": "3:06:40"}
34
+ {"current_steps": 340, "total_steps": 1467, "loss": 0.5679, "accuracy": 0.7046875357627869, "lr": 8.545454545454544e-07, "epoch": 0.23179513400656185, "percentage": 23.18, "elapsed_time": "0:55:48", "remaining_time": "3:04:58"}
35
+ {"current_steps": 350, "total_steps": 1467, "loss": 0.5859, "accuracy": 0.6796875, "lr": 8.469696969696968e-07, "epoch": 0.2386126379479313, "percentage": 23.86, "elapsed_time": "0:57:24", "remaining_time": "3:03:13"}
36
+ {"current_steps": 360, "total_steps": 1467, "loss": 0.5739, "accuracy": 0.698437511920929, "lr": 8.393939393939393e-07, "epoch": 0.2454301418893008, "percentage": 24.54, "elapsed_time": "0:59:03", "remaining_time": "3:01:37"}
37
+ {"current_steps": 370, "total_steps": 1467, "loss": 0.5378, "accuracy": 0.7234375476837158, "lr": 8.318181818181817e-07, "epoch": 0.2522476458306702, "percentage": 25.22, "elapsed_time": "1:00:40", "remaining_time": "2:59:52"}
38
+ {"current_steps": 380, "total_steps": 1467, "loss": 0.5508, "accuracy": 0.7296874523162842, "lr": 8.242424242424241e-07, "epoch": 0.2590651497720397, "percentage": 25.9, "elapsed_time": "1:02:17", "remaining_time": "2:58:09"}
39
+ {"current_steps": 390, "total_steps": 1467, "loss": 0.5618, "accuracy": 0.715624988079071, "lr": 8.166666666666666e-07, "epoch": 0.2658826537134092, "percentage": 26.58, "elapsed_time": "1:03:55", "remaining_time": "2:56:31"}
40
+ {"current_steps": 400, "total_steps": 1467, "loss": 0.5494, "accuracy": 0.721875011920929, "lr": 8.09090909090909e-07, "epoch": 0.2727001576547786, "percentage": 27.27, "elapsed_time": "1:05:33", "remaining_time": "2:54:52"}
41
+ {"current_steps": 410, "total_steps": 1467, "loss": 0.5462, "accuracy": 0.7343750596046448, "lr": 8.015151515151514e-07, "epoch": 0.2795176615961481, "percentage": 27.95, "elapsed_time": "1:07:10", "remaining_time": "2:53:11"}
42
+ {"current_steps": 420, "total_steps": 1467, "loss": 0.5532, "accuracy": 0.7265625, "lr": 7.939393939393939e-07, "epoch": 0.2863351655375176, "percentage": 28.63, "elapsed_time": "1:08:47", "remaining_time": "2:51:29"}
43
+ {"current_steps": 430, "total_steps": 1467, "loss": 0.5562, "accuracy": 0.703125, "lr": 7.863636363636363e-07, "epoch": 0.293152669478887, "percentage": 29.31, "elapsed_time": "1:10:26", "remaining_time": "2:49:52"}
44
+ {"current_steps": 440, "total_steps": 1467, "loss": 0.5384, "accuracy": 0.75, "lr": 7.787878787878787e-07, "epoch": 0.2999701734202565, "percentage": 29.99, "elapsed_time": "1:12:02", "remaining_time": "2:48:08"}
45
+ {"current_steps": 450, "total_steps": 1467, "loss": 0.539, "accuracy": 0.734375, "lr": 7.712121212121212e-07, "epoch": 0.306787677361626, "percentage": 30.67, "elapsed_time": "1:13:40", "remaining_time": "2:46:31"}
46
+ {"current_steps": 460, "total_steps": 1467, "loss": 0.5309, "accuracy": 0.7468750476837158, "lr": 7.636363636363636e-07, "epoch": 0.3136051813029954, "percentage": 31.36, "elapsed_time": "1:15:18", "remaining_time": "2:44:50"}
47
+ {"current_steps": 470, "total_steps": 1467, "loss": 0.5343, "accuracy": 0.7562500238418579, "lr": 7.56060606060606e-07, "epoch": 0.3204226852443649, "percentage": 32.04, "elapsed_time": "1:16:54", "remaining_time": "2:43:07"}
48
+ {"current_steps": 480, "total_steps": 1467, "loss": 0.5479, "accuracy": 0.7437500357627869, "lr": 7.484848484848485e-07, "epoch": 0.3272401891857344, "percentage": 32.72, "elapsed_time": "1:18:31", "remaining_time": "2:41:27"}
49
+ {"current_steps": 490, "total_steps": 1467, "loss": 0.5022, "accuracy": 0.7562500238418579, "lr": 7.409090909090909e-07, "epoch": 0.3340576931271038, "percentage": 33.4, "elapsed_time": "1:20:08", "remaining_time": "2:39:47"}
50
+ {"current_steps": 500, "total_steps": 1467, "loss": 0.4879, "accuracy": 0.7875000238418579, "lr": 7.333333333333332e-07, "epoch": 0.3408751970684733, "percentage": 34.08, "elapsed_time": "1:21:44", "remaining_time": "2:38:06"}
51
+ {"current_steps": 510, "total_steps": 1467, "loss": 0.5303, "accuracy": 0.7312500476837158, "lr": 7.257575757575756e-07, "epoch": 0.3476927010098428, "percentage": 34.76, "elapsed_time": "1:23:51", "remaining_time": "2:37:20"}
52
+ {"current_steps": 520, "total_steps": 1467, "loss": 0.5015, "accuracy": 0.7671874761581421, "lr": 7.181818181818181e-07, "epoch": 0.3545102049512122, "percentage": 35.45, "elapsed_time": "1:25:27", "remaining_time": "2:35:38"}
53
+ {"current_steps": 530, "total_steps": 1467, "loss": 0.5252, "accuracy": 0.746874988079071, "lr": 7.106060606060605e-07, "epoch": 0.3613277088925817, "percentage": 36.13, "elapsed_time": "1:27:04", "remaining_time": "2:33:56"}
54
+ {"current_steps": 540, "total_steps": 1467, "loss": 0.4807, "accuracy": 0.793749988079071, "lr": 7.030303030303029e-07, "epoch": 0.3681452128339512, "percentage": 36.81, "elapsed_time": "1:28:42", "remaining_time": "2:32:17"}
55
+ {"current_steps": 550, "total_steps": 1467, "loss": 0.501, "accuracy": 0.765625, "lr": 6.954545454545454e-07, "epoch": 0.3749627167753206, "percentage": 37.49, "elapsed_time": "1:30:17", "remaining_time": "2:30:31"}
56
+ {"current_steps": 560, "total_steps": 1467, "loss": 0.4941, "accuracy": 0.7593750357627869, "lr": 6.878787878787878e-07, "epoch": 0.3817802207166901, "percentage": 38.17, "elapsed_time": "1:31:53", "remaining_time": "2:28:50"}
57
+ {"current_steps": 570, "total_steps": 1467, "loss": 0.5234, "accuracy": 0.7406250238418579, "lr": 6.803030303030302e-07, "epoch": 0.3885977246580596, "percentage": 38.85, "elapsed_time": "1:33:30", "remaining_time": "2:27:09"}
58
+ {"current_steps": 580, "total_steps": 1467, "loss": 0.4888, "accuracy": 0.785937488079071, "lr": 6.727272727272727e-07, "epoch": 0.395415228599429, "percentage": 39.54, "elapsed_time": "1:35:05", "remaining_time": "2:25:26"}
59
+ {"current_steps": 590, "total_steps": 1467, "loss": 0.4718, "accuracy": 0.770312488079071, "lr": 6.651515151515151e-07, "epoch": 0.4022327325407985, "percentage": 40.22, "elapsed_time": "1:36:42", "remaining_time": "2:23:45"}
60
+ {"current_steps": 600, "total_steps": 1467, "loss": 0.503, "accuracy": 0.75, "lr": 6.575757575757575e-07, "epoch": 0.409050236482168, "percentage": 40.9, "elapsed_time": "1:38:21", "remaining_time": "2:22:07"}
61
+ {"current_steps": 610, "total_steps": 1467, "loss": 0.4939, "accuracy": 0.753125011920929, "lr": 6.5e-07, "epoch": 0.4158677404235374, "percentage": 41.58, "elapsed_time": "1:39:58", "remaining_time": "2:20:27"}
62
+ {"current_steps": 620, "total_steps": 1467, "loss": 0.5032, "accuracy": 0.7640625238418579, "lr": 6.424242424242424e-07, "epoch": 0.4226852443649069, "percentage": 42.26, "elapsed_time": "1:41:35", "remaining_time": "2:18:47"}
63
+ {"current_steps": 630, "total_steps": 1467, "loss": 0.4516, "accuracy": 0.7953125238418579, "lr": 6.348484848484848e-07, "epoch": 0.4295027483062764, "percentage": 42.94, "elapsed_time": "1:43:12", "remaining_time": "2:17:07"}
64
+ {"current_steps": 640, "total_steps": 1467, "loss": 0.481, "accuracy": 0.7671875357627869, "lr": 6.272727272727273e-07, "epoch": 0.4363202522476458, "percentage": 43.63, "elapsed_time": "1:44:49", "remaining_time": "2:15:27"}
65
+ {"current_steps": 650, "total_steps": 1467, "loss": 0.4669, "accuracy": 0.776562511920929, "lr": 6.196969696969697e-07, "epoch": 0.4431377561890153, "percentage": 44.31, "elapsed_time": "1:46:25", "remaining_time": "2:13:46"}
66
+ {"current_steps": 660, "total_steps": 1467, "loss": 0.5139, "accuracy": 0.7328125238418579, "lr": 6.12121212121212e-07, "epoch": 0.4499552601303848, "percentage": 44.99, "elapsed_time": "1:48:01", "remaining_time": "2:12:05"}
67
+ {"current_steps": 670, "total_steps": 1467, "loss": 0.4842, "accuracy": 0.7515625357627869, "lr": 6.045454545454545e-07, "epoch": 0.4567727640717542, "percentage": 45.67, "elapsed_time": "1:49:37", "remaining_time": "2:10:23"}
68
+ {"current_steps": 680, "total_steps": 1467, "loss": 0.4707, "accuracy": 0.7796874642372131, "lr": 5.969696969696969e-07, "epoch": 0.4635902680131237, "percentage": 46.35, "elapsed_time": "1:51:14", "remaining_time": "2:08:44"}
69
+ {"current_steps": 690, "total_steps": 1467, "loss": 0.463, "accuracy": 0.7640625238418579, "lr": 5.893939393939393e-07, "epoch": 0.4704077719544932, "percentage": 47.03, "elapsed_time": "1:52:50", "remaining_time": "2:07:04"}
70
+ {"current_steps": 700, "total_steps": 1467, "loss": 0.4424, "accuracy": 0.7937500476837158, "lr": 5.818181818181818e-07, "epoch": 0.4772252758958626, "percentage": 47.72, "elapsed_time": "1:54:27", "remaining_time": "2:05:24"}
71
+ {"current_steps": 710, "total_steps": 1467, "loss": 0.4667, "accuracy": 0.7796875238418579, "lr": 5.742424242424242e-07, "epoch": 0.4840427798372321, "percentage": 48.4, "elapsed_time": "1:56:05", "remaining_time": "2:03:46"}
72
+ {"current_steps": 720, "total_steps": 1467, "loss": 0.4639, "accuracy": 0.784375011920929, "lr": 5.666666666666666e-07, "epoch": 0.4908602837786016, "percentage": 49.08, "elapsed_time": "1:57:43", "remaining_time": "2:02:08"}
73
+ {"current_steps": 730, "total_steps": 1467, "loss": 0.4337, "accuracy": 0.8078125715255737, "lr": 5.590909090909091e-07, "epoch": 0.497677787719971, "percentage": 49.76, "elapsed_time": "1:59:19", "remaining_time": "2:00:28"}
74
+ {"current_steps": 740, "total_steps": 1467, "loss": 0.4669, "accuracy": 0.762499988079071, "lr": 5.515151515151515e-07, "epoch": 0.5044952916613404, "percentage": 50.44, "elapsed_time": "2:00:56", "remaining_time": "1:58:49"}
75
+ {"current_steps": 750, "total_steps": 1467, "loss": 0.4467, "accuracy": 0.78125, "lr": 5.439393939393939e-07, "epoch": 0.5113127956027099, "percentage": 51.12, "elapsed_time": "2:02:32", "remaining_time": "1:57:08"}
76
+ {"current_steps": 760, "total_steps": 1467, "loss": 0.4594, "accuracy": 0.796875, "lr": 5.363636363636363e-07, "epoch": 0.5181302995440794, "percentage": 51.81, "elapsed_time": "2:04:07", "remaining_time": "1:55:28"}
77
+ {"current_steps": 770, "total_steps": 1467, "loss": 0.4376, "accuracy": 0.8312500715255737, "lr": 5.287878787878788e-07, "epoch": 0.5249478034854489, "percentage": 52.49, "elapsed_time": "2:05:43", "remaining_time": "1:53:48"}
78
+ {"current_steps": 780, "total_steps": 1467, "loss": 0.4695, "accuracy": 0.7578125, "lr": 5.212121212121212e-07, "epoch": 0.5317653074268184, "percentage": 53.17, "elapsed_time": "2:07:19", "remaining_time": "1:52:08"}
79
+ {"current_steps": 790, "total_steps": 1467, "loss": 0.4579, "accuracy": 0.7906250357627869, "lr": 5.136363636363636e-07, "epoch": 0.5385828113681879, "percentage": 53.85, "elapsed_time": "2:08:55", "remaining_time": "1:50:28"}
80
+ {"current_steps": 800, "total_steps": 1467, "loss": 0.4123, "accuracy": 0.8250000476837158, "lr": 5.060606060606061e-07, "epoch": 0.5454003153095572, "percentage": 54.53, "elapsed_time": "2:10:32", "remaining_time": "1:48:50"}
81
+ {"current_steps": 810, "total_steps": 1467, "loss": 0.4335, "accuracy": 0.785937488079071, "lr": 4.984848484848485e-07, "epoch": 0.5522178192509267, "percentage": 55.21, "elapsed_time": "2:12:09", "remaining_time": "1:47:11"}
82
+ {"current_steps": 820, "total_steps": 1467, "loss": 0.4561, "accuracy": 0.7875000238418579, "lr": 4.909090909090909e-07, "epoch": 0.5590353231922962, "percentage": 55.9, "elapsed_time": "2:13:44", "remaining_time": "1:45:31"}
83
+ {"current_steps": 830, "total_steps": 1467, "loss": 0.4395, "accuracy": 0.8062499761581421, "lr": 4.833333333333333e-07, "epoch": 0.5658528271336657, "percentage": 56.58, "elapsed_time": "2:15:20", "remaining_time": "1:43:52"}
84
+ {"current_steps": 840, "total_steps": 1467, "loss": 0.4585, "accuracy": 0.7874999642372131, "lr": 4.7575757575757574e-07, "epoch": 0.5726703310750352, "percentage": 57.26, "elapsed_time": "2:16:56", "remaining_time": "1:42:12"}
85
+ {"current_steps": 850, "total_steps": 1467, "loss": 0.4487, "accuracy": 0.785937488079071, "lr": 4.681818181818182e-07, "epoch": 0.5794878350164047, "percentage": 57.94, "elapsed_time": "2:18:33", "remaining_time": "1:40:34"}
86
+ {"current_steps": 860, "total_steps": 1467, "loss": 0.4466, "accuracy": 0.8109375238418579, "lr": 4.606060606060606e-07, "epoch": 0.586305338957774, "percentage": 58.62, "elapsed_time": "2:20:11", "remaining_time": "1:38:56"}
87
+ {"current_steps": 870, "total_steps": 1467, "loss": 0.4168, "accuracy": 0.8296875357627869, "lr": 4.53030303030303e-07, "epoch": 0.5931228428991435, "percentage": 59.3, "elapsed_time": "2:21:49", "remaining_time": "1:37:19"}
88
+ {"current_steps": 880, "total_steps": 1467, "loss": 0.4419, "accuracy": 0.776562511920929, "lr": 4.4545454545454544e-07, "epoch": 0.599940346840513, "percentage": 59.99, "elapsed_time": "2:23:26", "remaining_time": "1:35:40"}
89
+ {"current_steps": 890, "total_steps": 1467, "loss": 0.4338, "accuracy": 0.7953125238418579, "lr": 4.3787878787878784e-07, "epoch": 0.6067578507818825, "percentage": 60.67, "elapsed_time": "2:25:01", "remaining_time": "1:34:01"}
90
+ {"current_steps": 900, "total_steps": 1467, "loss": 0.4428, "accuracy": 0.8062500357627869, "lr": 4.303030303030303e-07, "epoch": 0.613575354723252, "percentage": 61.35, "elapsed_time": "2:26:38", "remaining_time": "1:32:22"}
91
+ {"current_steps": 910, "total_steps": 1467, "loss": 0.4444, "accuracy": 0.7890625, "lr": 4.227272727272727e-07, "epoch": 0.6203928586646215, "percentage": 62.03, "elapsed_time": "2:28:14", "remaining_time": "1:30:44"}
92
+ {"current_steps": 920, "total_steps": 1467, "loss": 0.4204, "accuracy": 0.8125, "lr": 4.1515151515151513e-07, "epoch": 0.6272103626059908, "percentage": 62.71, "elapsed_time": "2:29:48", "remaining_time": "1:29:04"}
93
+ {"current_steps": 930, "total_steps": 1467, "loss": 0.4137, "accuracy": 0.8343750238418579, "lr": 4.075757575757576e-07, "epoch": 0.6340278665473603, "percentage": 63.39, "elapsed_time": "2:31:28", "remaining_time": "1:27:27"}
94
+ {"current_steps": 940, "total_steps": 1467, "loss": 0.4011, "accuracy": 0.8265625238418579, "lr": 4e-07, "epoch": 0.6408453704887298, "percentage": 64.08, "elapsed_time": "2:33:04", "remaining_time": "1:25:49"}
95
+ {"current_steps": 950, "total_steps": 1467, "loss": 0.4295, "accuracy": 0.796875, "lr": 3.924242424242424e-07, "epoch": 0.6476628744300993, "percentage": 64.76, "elapsed_time": "2:34:40", "remaining_time": "1:24:10"}
96
+ {"current_steps": 960, "total_steps": 1467, "loss": 0.4118, "accuracy": 0.7984375357627869, "lr": 3.8484848484848483e-07, "epoch": 0.6544803783714688, "percentage": 65.44, "elapsed_time": "2:36:16", "remaining_time": "1:22:32"}
97
+ {"current_steps": 970, "total_steps": 1467, "loss": 0.4357, "accuracy": 0.8046875, "lr": 3.7727272727272723e-07, "epoch": 0.6612978823128383, "percentage": 66.12, "elapsed_time": "2:37:50", "remaining_time": "1:20:52"}
98
+ {"current_steps": 980, "total_steps": 1467, "loss": 0.4098, "accuracy": 0.8203125596046448, "lr": 3.696969696969697e-07, "epoch": 0.6681153862542076, "percentage": 66.8, "elapsed_time": "2:39:26", "remaining_time": "1:19:14"}
99
+ {"current_steps": 990, "total_steps": 1467, "loss": 0.397, "accuracy": 0.828125, "lr": 3.6212121212121213e-07, "epoch": 0.6749328901955771, "percentage": 67.48, "elapsed_time": "2:41:02", "remaining_time": "1:17:35"}
100
+ {"current_steps": 1000, "total_steps": 1467, "loss": 0.4264, "accuracy": 0.8031250238418579, "lr": 3.545454545454545e-07, "epoch": 0.6817503941369466, "percentage": 68.17, "elapsed_time": "2:42:40", "remaining_time": "1:15:58"}
101
+ {"current_steps": 1010, "total_steps": 1467, "loss": 0.3803, "accuracy": 0.8328125476837158, "lr": 3.46969696969697e-07, "epoch": 0.6885678980783161, "percentage": 68.85, "elapsed_time": "2:44:45", "remaining_time": "1:14:32"}
102
+ {"current_steps": 1020, "total_steps": 1467, "loss": 0.4016, "accuracy": 0.8218750357627869, "lr": 3.393939393939394e-07, "epoch": 0.6953854020196856, "percentage": 69.53, "elapsed_time": "2:46:21", "remaining_time": "1:12:54"}
103
+ {"current_steps": 1030, "total_steps": 1467, "loss": 0.434, "accuracy": 0.8156250715255737, "lr": 3.318181818181818e-07, "epoch": 0.7022029059610551, "percentage": 70.21, "elapsed_time": "2:47:57", "remaining_time": "1:11:15"}
104
+ {"current_steps": 1040, "total_steps": 1467, "loss": 0.4147, "accuracy": 0.8203125, "lr": 3.242424242424242e-07, "epoch": 0.7090204099024244, "percentage": 70.89, "elapsed_time": "2:49:35", "remaining_time": "1:09:38"}
105
+ {"current_steps": 1050, "total_steps": 1467, "loss": 0.4301, "accuracy": 0.8125, "lr": 3.166666666666666e-07, "epoch": 0.7158379138437939, "percentage": 71.57, "elapsed_time": "2:51:15", "remaining_time": "1:08:00"}
106
+ {"current_steps": 1060, "total_steps": 1467, "loss": 0.3642, "accuracy": 0.8578125238418579, "lr": 3.0909090909090907e-07, "epoch": 0.7226554177851634, "percentage": 72.26, "elapsed_time": "2:52:53", "remaining_time": "1:06:23"}
107
+ {"current_steps": 1070, "total_steps": 1467, "loss": 0.3929, "accuracy": 0.8359375, "lr": 3.015151515151515e-07, "epoch": 0.7294729217265329, "percentage": 72.94, "elapsed_time": "2:54:29", "remaining_time": "1:04:44"}
108
+ {"current_steps": 1080, "total_steps": 1467, "loss": 0.4123, "accuracy": 0.8218750357627869, "lr": 2.939393939393939e-07, "epoch": 0.7362904256679024, "percentage": 73.62, "elapsed_time": "2:56:04", "remaining_time": "1:03:05"}
109
+ {"current_steps": 1090, "total_steps": 1467, "loss": 0.3986, "accuracy": 0.831250011920929, "lr": 2.8636363636363637e-07, "epoch": 0.7431079296092719, "percentage": 74.3, "elapsed_time": "2:57:40", "remaining_time": "1:01:27"}
110
+ {"current_steps": 1100, "total_steps": 1467, "loss": 0.4219, "accuracy": 0.800000011920929, "lr": 2.787878787878788e-07, "epoch": 0.7499254335506412, "percentage": 74.98, "elapsed_time": "2:59:16", "remaining_time": "0:59:48"}
111
+ {"current_steps": 1110, "total_steps": 1467, "loss": 0.4178, "accuracy": 0.8109375238418579, "lr": 2.712121212121212e-07, "epoch": 0.7567429374920107, "percentage": 75.66, "elapsed_time": "3:00:50", "remaining_time": "0:58:09"}
112
+ {"current_steps": 1120, "total_steps": 1467, "loss": 0.4114, "accuracy": 0.8328125476837158, "lr": 2.636363636363636e-07, "epoch": 0.7635604414333802, "percentage": 76.35, "elapsed_time": "3:02:27", "remaining_time": "0:56:31"}
113
+ {"current_steps": 1130, "total_steps": 1467, "loss": 0.4146, "accuracy": 0.8125000596046448, "lr": 2.56060606060606e-07, "epoch": 0.7703779453747497, "percentage": 77.03, "elapsed_time": "3:04:02", "remaining_time": "0:54:53"}
114
+ {"current_steps": 1140, "total_steps": 1467, "loss": 0.4164, "accuracy": 0.8093750476837158, "lr": 2.4848484848484846e-07, "epoch": 0.7771954493161192, "percentage": 77.71, "elapsed_time": "3:05:39", "remaining_time": "0:53:15"}
115
+ {"current_steps": 1150, "total_steps": 1467, "loss": 0.4001, "accuracy": 0.828125, "lr": 2.409090909090909e-07, "epoch": 0.7840129532574887, "percentage": 78.39, "elapsed_time": "3:07:15", "remaining_time": "0:51:37"}
116
+ {"current_steps": 1160, "total_steps": 1467, "loss": 0.3736, "accuracy": 0.8374999761581421, "lr": 2.3333333333333333e-07, "epoch": 0.790830457198858, "percentage": 79.07, "elapsed_time": "3:08:52", "remaining_time": "0:49:59"}
117
+ {"current_steps": 1170, "total_steps": 1467, "loss": 0.3893, "accuracy": 0.8296875357627869, "lr": 2.2575757575757576e-07, "epoch": 0.7976479611402275, "percentage": 79.75, "elapsed_time": "3:10:29", "remaining_time": "0:48:21"}
118
+ {"current_steps": 1180, "total_steps": 1467, "loss": 0.4049, "accuracy": 0.8046875, "lr": 2.1818181818181815e-07, "epoch": 0.804465465081597, "percentage": 80.44, "elapsed_time": "3:12:04", "remaining_time": "0:46:42"}
119
+ {"current_steps": 1190, "total_steps": 1467, "loss": 0.4036, "accuracy": 0.8218750357627869, "lr": 2.106060606060606e-07, "epoch": 0.8112829690229665, "percentage": 81.12, "elapsed_time": "3:13:42", "remaining_time": "0:45:05"}
120
+ {"current_steps": 1200, "total_steps": 1467, "loss": 0.3761, "accuracy": 0.839062511920929, "lr": 2.0303030303030303e-07, "epoch": 0.818100472964336, "percentage": 81.8, "elapsed_time": "3:15:17", "remaining_time": "0:43:27"}
121
+ {"current_steps": 1210, "total_steps": 1467, "loss": 0.3763, "accuracy": 0.8343750238418579, "lr": 1.9545454545454545e-07, "epoch": 0.8249179769057055, "percentage": 82.48, "elapsed_time": "3:16:51", "remaining_time": "0:41:48"}
122
+ {"current_steps": 1220, "total_steps": 1467, "loss": 0.3691, "accuracy": 0.854687511920929, "lr": 1.8787878787878785e-07, "epoch": 0.8317354808470748, "percentage": 83.16, "elapsed_time": "3:18:28", "remaining_time": "0:40:10"}
123
+ {"current_steps": 1230, "total_steps": 1467, "loss": 0.3711, "accuracy": 0.8609374761581421, "lr": 1.803030303030303e-07, "epoch": 0.8385529847884443, "percentage": 83.84, "elapsed_time": "3:20:06", "remaining_time": "0:38:33"}
124
+ {"current_steps": 1240, "total_steps": 1467, "loss": 0.3913, "accuracy": 0.846875011920929, "lr": 1.7272727272727272e-07, "epoch": 0.8453704887298138, "percentage": 84.53, "elapsed_time": "3:21:43", "remaining_time": "0:36:55"}
125
+ {"current_steps": 1250, "total_steps": 1467, "loss": 0.3713, "accuracy": 0.84375, "lr": 1.6515151515151515e-07, "epoch": 0.8521879926711833, "percentage": 85.21, "elapsed_time": "3:23:19", "remaining_time": "0:35:17"}
126
+ {"current_steps": 1260, "total_steps": 1467, "loss": 0.355, "accuracy": 0.8531250357627869, "lr": 1.5757575757575757e-07, "epoch": 0.8590054966125528, "percentage": 85.89, "elapsed_time": "3:24:55", "remaining_time": "0:33:40"}
127
+ {"current_steps": 1270, "total_steps": 1467, "loss": 0.3747, "accuracy": 0.840624988079071, "lr": 1.5e-07, "epoch": 0.8658230005539223, "percentage": 86.57, "elapsed_time": "3:26:32", "remaining_time": "0:32:02"}
128
+ {"current_steps": 1280, "total_steps": 1467, "loss": 0.3437, "accuracy": 0.862500011920929, "lr": 1.4242424242424242e-07, "epoch": 0.8726405044952916, "percentage": 87.25, "elapsed_time": "3:28:08", "remaining_time": "0:30:24"}
129
+ {"current_steps": 1290, "total_steps": 1467, "loss": 0.3713, "accuracy": 0.8312500715255737, "lr": 1.3484848484848484e-07, "epoch": 0.8794580084366611, "percentage": 87.93, "elapsed_time": "3:29:45", "remaining_time": "0:28:46"}
130
+ {"current_steps": 1300, "total_steps": 1467, "loss": 0.3841, "accuracy": 0.8343750238418579, "lr": 1.2727272727272726e-07, "epoch": 0.8862755123780306, "percentage": 88.62, "elapsed_time": "3:31:21", "remaining_time": "0:27:09"}
131
+ {"current_steps": 1310, "total_steps": 1467, "loss": 0.3754, "accuracy": 0.831250011920929, "lr": 1.196969696969697e-07, "epoch": 0.8930930163194001, "percentage": 89.3, "elapsed_time": "3:32:56", "remaining_time": "0:25:31"}
132
+ {"current_steps": 1320, "total_steps": 1467, "loss": 0.4089, "accuracy": 0.8140624761581421, "lr": 1.1212121212121211e-07, "epoch": 0.8999105202607696, "percentage": 89.98, "elapsed_time": "3:34:33", "remaining_time": "0:23:53"}
133
+ {"current_steps": 1330, "total_steps": 1467, "loss": 0.4075, "accuracy": 0.8171875476837158, "lr": 1.0454545454545454e-07, "epoch": 0.9067280242021389, "percentage": 90.66, "elapsed_time": "3:36:10", "remaining_time": "0:22:16"}
134
+ {"current_steps": 1340, "total_steps": 1467, "loss": 0.3927, "accuracy": 0.828125, "lr": 9.696969696969696e-08, "epoch": 0.9135455281435084, "percentage": 91.34, "elapsed_time": "3:37:47", "remaining_time": "0:20:38"}
135
+ {"current_steps": 1350, "total_steps": 1467, "loss": 0.3625, "accuracy": 0.846875011920929, "lr": 8.93939393939394e-08, "epoch": 0.9203630320848779, "percentage": 92.02, "elapsed_time": "3:39:23", "remaining_time": "0:19:00"}
136
+ {"current_steps": 1360, "total_steps": 1467, "loss": 0.3859, "accuracy": 0.828125, "lr": 8.181818181818182e-08, "epoch": 0.9271805360262474, "percentage": 92.71, "elapsed_time": "3:40:57", "remaining_time": "0:17:23"}
137
+ {"current_steps": 1370, "total_steps": 1467, "loss": 0.41, "accuracy": 0.8093750476837158, "lr": 7.424242424242424e-08, "epoch": 0.9339980399676169, "percentage": 93.39, "elapsed_time": "3:42:35", "remaining_time": "0:15:45"}
138
+ {"current_steps": 1380, "total_steps": 1467, "loss": 0.3632, "accuracy": 0.8515625, "lr": 6.666666666666667e-08, "epoch": 0.9408155439089864, "percentage": 94.07, "elapsed_time": "3:44:12", "remaining_time": "0:14:08"}
139
+ {"current_steps": 1390, "total_steps": 1467, "loss": 0.3386, "accuracy": 0.8750000596046448, "lr": 5.9090909090909085e-08, "epoch": 0.9476330478503557, "percentage": 94.75, "elapsed_time": "3:45:49", "remaining_time": "0:12:30"}
140
+ {"current_steps": 1400, "total_steps": 1467, "loss": 0.3614, "accuracy": 0.8453124761581421, "lr": 5.151515151515151e-08, "epoch": 0.9544505517917252, "percentage": 95.43, "elapsed_time": "3:47:25", "remaining_time": "0:10:53"}
141
+ {"current_steps": 1410, "total_steps": 1467, "loss": 0.4114, "accuracy": 0.8218749761581421, "lr": 4.393939393939393e-08, "epoch": 0.9612680557330947, "percentage": 96.11, "elapsed_time": "3:49:00", "remaining_time": "0:09:15"}
142
+ {"current_steps": 1420, "total_steps": 1467, "loss": 0.3557, "accuracy": 0.846875011920929, "lr": 3.636363636363636e-08, "epoch": 0.9680855596744642, "percentage": 96.8, "elapsed_time": "3:50:35", "remaining_time": "0:07:37"}
143
+ {"current_steps": 1430, "total_steps": 1467, "loss": 0.3872, "accuracy": 0.8343750238418579, "lr": 2.8787878787878787e-08, "epoch": 0.9749030636158337, "percentage": 97.48, "elapsed_time": "3:52:11", "remaining_time": "0:06:00"}
144
+ {"current_steps": 1440, "total_steps": 1467, "loss": 0.3557, "accuracy": 0.859375, "lr": 2.1212121212121214e-08, "epoch": 0.9817205675572032, "percentage": 98.16, "elapsed_time": "3:53:47", "remaining_time": "0:04:23"}
145
+ {"current_steps": 1450, "total_steps": 1467, "loss": 0.3724, "accuracy": 0.8500000238418579, "lr": 1.3636363636363635e-08, "epoch": 0.9885380714985725, "percentage": 98.84, "elapsed_time": "3:55:24", "remaining_time": "0:02:45"}
146
+ {"current_steps": 1460, "total_steps": 1467, "loss": 0.3698, "accuracy": 0.848437488079071, "lr": 6.06060606060606e-09, "epoch": 0.995355575439942, "percentage": 99.52, "elapsed_time": "3:56:59", "remaining_time": "0:01:08"}
147
+ {"current_steps": 1467, "total_steps": 1467, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "3:58:30", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2233 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 1.0,
6
+ "eval_steps": 500,
7
+ "global_step": 1467,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.006817503941369466,
14
+ "grad_norm": 20.121693308244087,
15
+ "learning_rate": 6.122448979591837e-08,
16
+ "logits/chosen": -0.013989130035042763,
17
+ "logits/rejected": 0.058542873710393906,
18
+ "logps/chosen": -190.215087890625,
19
+ "logps/rejected": -203.27479553222656,
20
+ "loss": 0.6922,
21
+ "rewards/accuracies": 0.4390625059604645,
22
+ "rewards/chosen": 0.0016301964642480016,
23
+ "rewards/margins": 0.0021729914005845785,
24
+ "rewards/rejected": -0.0005427949363365769,
25
+ "step": 10
26
+ },
27
+ {
28
+ "epoch": 0.013635007882738932,
29
+ "grad_norm": 20.76481447180183,
30
+ "learning_rate": 1.2925170068027211e-07,
31
+ "logits/chosen": -0.006685615051537752,
32
+ "logits/rejected": 0.06347016990184784,
33
+ "logps/chosen": -191.0093994140625,
34
+ "logps/rejected": -203.0770263671875,
35
+ "loss": 0.6933,
36
+ "rewards/accuracies": 0.5015624761581421,
37
+ "rewards/chosen": -0.0009165612864308059,
38
+ "rewards/margins": 8.891527249943465e-05,
39
+ "rewards/rejected": -0.0010054768063127995,
40
+ "step": 20
41
+ },
42
+ {
43
+ "epoch": 0.0204525118241084,
44
+ "grad_norm": 21.80914665550797,
45
+ "learning_rate": 1.9727891156462583e-07,
46
+ "logits/chosen": 0.02424698881804943,
47
+ "logits/rejected": 0.0787603035569191,
48
+ "logps/chosen": -188.7990264892578,
49
+ "logps/rejected": -198.6798095703125,
50
+ "loss": 0.6929,
51
+ "rewards/accuracies": 0.5093750357627869,
52
+ "rewards/chosen": 0.0011249443050473928,
53
+ "rewards/margins": 0.000932438881136477,
54
+ "rewards/rejected": 0.00019250542391091585,
55
+ "step": 30
56
+ },
57
+ {
58
+ "epoch": 0.027270015765477863,
59
+ "grad_norm": 18.7019723893629,
60
+ "learning_rate": 2.653061224489796e-07,
61
+ "logits/chosen": -0.006816861219704151,
62
+ "logits/rejected": 0.05330298840999603,
63
+ "logps/chosen": -183.76039123535156,
64
+ "logps/rejected": -199.3633575439453,
65
+ "loss": 0.6925,
66
+ "rewards/accuracies": 0.520312488079071,
67
+ "rewards/chosen": -0.0010295719839632511,
68
+ "rewards/margins": 0.0016696588136255741,
69
+ "rewards/rejected": -0.0026992305647581816,
70
+ "step": 40
71
+ },
72
+ {
73
+ "epoch": 0.03408751970684733,
74
+ "grad_norm": 20.831078202136776,
75
+ "learning_rate": 3.333333333333333e-07,
76
+ "logits/chosen": 0.03178512677550316,
77
+ "logits/rejected": 0.10444696992635727,
78
+ "logps/chosen": -182.8831329345703,
79
+ "logps/rejected": -193.09466552734375,
80
+ "loss": 0.6921,
81
+ "rewards/accuracies": 0.520312488079071,
82
+ "rewards/chosen": -0.0042633856646716595,
83
+ "rewards/margins": 0.0025998500641435385,
84
+ "rewards/rejected": -0.006863235495984554,
85
+ "step": 50
86
+ },
87
+ {
88
+ "epoch": 0.0409050236482168,
89
+ "grad_norm": 23.46573004490643,
90
+ "learning_rate": 4.0136054421768705e-07,
91
+ "logits/chosen": -0.0085222776979208,
92
+ "logits/rejected": 0.04688471555709839,
93
+ "logps/chosen": -187.2247314453125,
94
+ "logps/rejected": -197.925537109375,
95
+ "loss": 0.6919,
96
+ "rewards/accuracies": 0.5218749642372131,
97
+ "rewards/chosen": -0.009058980271220207,
98
+ "rewards/margins": 0.003005079925060272,
99
+ "rewards/rejected": -0.01206406019628048,
100
+ "step": 60
101
+ },
102
+ {
103
+ "epoch": 0.04772252758958626,
104
+ "grad_norm": 21.16825784724637,
105
+ "learning_rate": 4.693877551020408e-07,
106
+ "logits/chosen": 0.03674064576625824,
107
+ "logits/rejected": 0.10054312646389008,
108
+ "logps/chosen": -177.9295654296875,
109
+ "logps/rejected": -190.43357849121094,
110
+ "loss": 0.6919,
111
+ "rewards/accuracies": 0.5453125238418579,
112
+ "rewards/chosen": -0.014818010851740837,
113
+ "rewards/margins": 0.0029301545582711697,
114
+ "rewards/rejected": -0.017748164013028145,
115
+ "step": 70
116
+ },
117
+ {
118
+ "epoch": 0.05454003153095573,
119
+ "grad_norm": 19.91582657292646,
120
+ "learning_rate": 5.374149659863945e-07,
121
+ "logits/chosen": 0.03953830525279045,
122
+ "logits/rejected": 0.0943986028432846,
123
+ "logps/chosen": -174.0701446533203,
124
+ "logps/rejected": -185.5764617919922,
125
+ "loss": 0.6897,
126
+ "rewards/accuracies": 0.5453125238418579,
127
+ "rewards/chosen": -0.02375047467648983,
128
+ "rewards/margins": 0.007541469298303127,
129
+ "rewards/rejected": -0.03129194676876068,
130
+ "step": 80
131
+ },
132
+ {
133
+ "epoch": 0.0613575354723252,
134
+ "grad_norm": 19.184157083023965,
135
+ "learning_rate": 6.054421768707482e-07,
136
+ "logits/chosen": 0.041828252375125885,
137
+ "logits/rejected": 0.11053334176540375,
138
+ "logps/chosen": -180.23428344726562,
139
+ "logps/rejected": -192.7969207763672,
140
+ "loss": 0.6894,
141
+ "rewards/accuracies": 0.5656249523162842,
142
+ "rewards/chosen": -0.04171518608927727,
143
+ "rewards/margins": 0.00825162697583437,
144
+ "rewards/rejected": -0.04996681213378906,
145
+ "step": 90
146
+ },
147
+ {
148
+ "epoch": 0.06817503941369465,
149
+ "grad_norm": 21.70613257034061,
150
+ "learning_rate": 6.734693877551019e-07,
151
+ "logits/chosen": 0.03590967878699303,
152
+ "logits/rejected": 0.12106480449438095,
153
+ "logps/chosen": -187.4181365966797,
154
+ "logps/rejected": -196.91404724121094,
155
+ "loss": 0.6858,
156
+ "rewards/accuracies": 0.620312511920929,
157
+ "rewards/chosen": -0.06401355564594269,
158
+ "rewards/margins": 0.015702249482274055,
159
+ "rewards/rejected": -0.07971581071615219,
160
+ "step": 100
161
+ },
162
+ {
163
+ "epoch": 0.07499254335506413,
164
+ "grad_norm": 21.340307822546738,
165
+ "learning_rate": 7.414965986394558e-07,
166
+ "logits/chosen": 0.054249007254838943,
167
+ "logits/rejected": 0.11480608582496643,
168
+ "logps/chosen": -198.7044219970703,
169
+ "logps/rejected": -212.5745086669922,
170
+ "loss": 0.6826,
171
+ "rewards/accuracies": 0.598437488079071,
172
+ "rewards/chosen": -0.10510613769292831,
173
+ "rewards/margins": 0.02307462878525257,
174
+ "rewards/rejected": -0.12818075716495514,
175
+ "step": 110
176
+ },
177
+ {
178
+ "epoch": 0.0818100472964336,
179
+ "grad_norm": 19.224497336436897,
180
+ "learning_rate": 8.095238095238095e-07,
181
+ "logits/chosen": 0.09257032722234726,
182
+ "logits/rejected": 0.12628528475761414,
183
+ "logps/chosen": -186.2566680908203,
184
+ "logps/rejected": -198.2835235595703,
185
+ "loss": 0.6818,
186
+ "rewards/accuracies": 0.6015625596046448,
187
+ "rewards/chosen": -0.13667678833007812,
188
+ "rewards/margins": 0.02535596489906311,
189
+ "rewards/rejected": -0.16203275322914124,
190
+ "step": 120
191
+ },
192
+ {
193
+ "epoch": 0.08862755123780305,
194
+ "grad_norm": 21.13611235058464,
195
+ "learning_rate": 8.775510204081632e-07,
196
+ "logits/chosen": 0.09457506239414215,
197
+ "logits/rejected": 0.15074704587459564,
198
+ "logps/chosen": -194.18267822265625,
199
+ "logps/rejected": -206.7709503173828,
200
+ "loss": 0.6724,
201
+ "rewards/accuracies": 0.6031250357627869,
202
+ "rewards/chosen": -0.17589515447616577,
203
+ "rewards/margins": 0.04765651002526283,
204
+ "rewards/rejected": -0.2235516607761383,
205
+ "step": 130
206
+ },
207
+ {
208
+ "epoch": 0.09544505517917252,
209
+ "grad_norm": 19.76839160560583,
210
+ "learning_rate": 9.45578231292517e-07,
211
+ "logits/chosen": 0.11603380739688873,
212
+ "logits/rejected": 0.1540485918521881,
213
+ "logps/chosen": -194.67906188964844,
214
+ "logps/rejected": -201.3298797607422,
215
+ "loss": 0.6753,
216
+ "rewards/accuracies": 0.6421875357627869,
217
+ "rewards/chosen": -0.21486632525920868,
218
+ "rewards/margins": 0.04251245781779289,
219
+ "rewards/rejected": -0.25737878680229187,
220
+ "step": 140
221
+ },
222
+ {
223
+ "epoch": 0.102262559120542,
224
+ "grad_norm": 20.781161821934894,
225
+ "learning_rate": 9.984848484848486e-07,
226
+ "logits/chosen": 0.18178227543830872,
227
+ "logits/rejected": 0.20421989262104034,
228
+ "logps/chosen": -194.18841552734375,
229
+ "logps/rejected": -205.5372314453125,
230
+ "loss": 0.6693,
231
+ "rewards/accuracies": 0.6109374761581421,
232
+ "rewards/chosen": -0.24953892827033997,
233
+ "rewards/margins": 0.05746041238307953,
234
+ "rewards/rejected": -0.3069993257522583,
235
+ "step": 150
236
+ },
237
+ {
238
+ "epoch": 0.10908006306191145,
239
+ "grad_norm": 20.949425745434294,
240
+ "learning_rate": 9.909090909090909e-07,
241
+ "logits/chosen": 0.16577480733394623,
242
+ "logits/rejected": 0.22489143908023834,
243
+ "logps/chosen": -189.01882934570312,
244
+ "logps/rejected": -204.76612854003906,
245
+ "loss": 0.6553,
246
+ "rewards/accuracies": 0.6578125357627869,
247
+ "rewards/chosen": -0.2690110504627228,
248
+ "rewards/margins": 0.0968737006187439,
249
+ "rewards/rejected": -0.36588478088378906,
250
+ "step": 160
251
+ },
252
+ {
253
+ "epoch": 0.11589756700328092,
254
+ "grad_norm": 21.376727693673736,
255
+ "learning_rate": 9.833333333333332e-07,
256
+ "logits/chosen": 0.16099530458450317,
257
+ "logits/rejected": 0.20968888700008392,
258
+ "logps/chosen": -198.27276611328125,
259
+ "logps/rejected": -207.08128356933594,
260
+ "loss": 0.6546,
261
+ "rewards/accuracies": 0.6500000357627869,
262
+ "rewards/chosen": -0.31624093651771545,
263
+ "rewards/margins": 0.09791112691164017,
264
+ "rewards/rejected": -0.4141520857810974,
265
+ "step": 170
266
+ },
267
+ {
268
+ "epoch": 0.1227150709446504,
269
+ "grad_norm": 20.47632034356792,
270
+ "learning_rate": 9.757575757575757e-07,
271
+ "logits/chosen": 0.16175265610218048,
272
+ "logits/rejected": 0.24207058548927307,
273
+ "logps/chosen": -192.8699188232422,
274
+ "logps/rejected": -204.4312744140625,
275
+ "loss": 0.6469,
276
+ "rewards/accuracies": 0.6687500476837158,
277
+ "rewards/chosen": -0.3407444357872009,
278
+ "rewards/margins": 0.12092556804418564,
279
+ "rewards/rejected": -0.46167001128196716,
280
+ "step": 180
281
+ },
282
+ {
283
+ "epoch": 0.12953257488601985,
284
+ "grad_norm": 20.746940996761676,
285
+ "learning_rate": 9.681818181818182e-07,
286
+ "logits/chosen": 0.15175826847553253,
287
+ "logits/rejected": 0.21674920618534088,
288
+ "logps/chosen": -193.29212951660156,
289
+ "logps/rejected": -209.36143493652344,
290
+ "loss": 0.6389,
291
+ "rewards/accuracies": 0.6812500357627869,
292
+ "rewards/chosen": -0.3786366581916809,
293
+ "rewards/margins": 0.1404908001422882,
294
+ "rewards/rejected": -0.5191274285316467,
295
+ "step": 190
296
+ },
297
+ {
298
+ "epoch": 0.1363500788273893,
299
+ "grad_norm": 20.484642032996728,
300
+ "learning_rate": 9.606060606060605e-07,
301
+ "logits/chosen": 0.1607164442539215,
302
+ "logits/rejected": 0.22002199292182922,
303
+ "logps/chosen": -197.4151153564453,
304
+ "logps/rejected": -209.8327178955078,
305
+ "loss": 0.6291,
306
+ "rewards/accuracies": 0.6609375476837158,
307
+ "rewards/chosen": -0.41719570755958557,
308
+ "rewards/margins": 0.17708109319210052,
309
+ "rewards/rejected": -0.5942767858505249,
310
+ "step": 200
311
+ },
312
+ {
313
+ "epoch": 0.1431675827687588,
314
+ "grad_norm": 26.738984065984987,
315
+ "learning_rate": 9.53030303030303e-07,
316
+ "logits/chosen": 0.15654993057250977,
317
+ "logits/rejected": 0.2388145625591278,
318
+ "logps/chosen": -195.02975463867188,
319
+ "logps/rejected": -207.19190979003906,
320
+ "loss": 0.6342,
321
+ "rewards/accuracies": 0.6749999523162842,
322
+ "rewards/chosen": -0.4655718505382538,
323
+ "rewards/margins": 0.16476726531982422,
324
+ "rewards/rejected": -0.6303391456604004,
325
+ "step": 210
326
+ },
327
+ {
328
+ "epoch": 0.14998508671012825,
329
+ "grad_norm": 20.33866123420931,
330
+ "learning_rate": 9.454545454545454e-07,
331
+ "logits/chosen": 0.12783432006835938,
332
+ "logits/rejected": 0.1976049840450287,
333
+ "logps/chosen": -201.7896728515625,
334
+ "logps/rejected": -215.41249084472656,
335
+ "loss": 0.6291,
336
+ "rewards/accuracies": 0.6609375476837158,
337
+ "rewards/chosen": -0.5083937644958496,
338
+ "rewards/margins": 0.18992076814174652,
339
+ "rewards/rejected": -0.6983146071434021,
340
+ "step": 220
341
+ },
342
+ {
343
+ "epoch": 0.1568025906514977,
344
+ "grad_norm": 32.54405565292402,
345
+ "learning_rate": 9.378787878787879e-07,
346
+ "logits/chosen": 0.1527099907398224,
347
+ "logits/rejected": 0.22111022472381592,
348
+ "logps/chosen": -193.4207763671875,
349
+ "logps/rejected": -207.85169982910156,
350
+ "loss": 0.6212,
351
+ "rewards/accuracies": 0.690625011920929,
352
+ "rewards/chosen": -0.531213641166687,
353
+ "rewards/margins": 0.2235802412033081,
354
+ "rewards/rejected": -0.7547938823699951,
355
+ "step": 230
356
+ },
357
+ {
358
+ "epoch": 0.1636200945928672,
359
+ "grad_norm": 19.095273306756834,
360
+ "learning_rate": 9.303030303030303e-07,
361
+ "logits/chosen": 0.15274283289909363,
362
+ "logits/rejected": 0.21214556694030762,
363
+ "logps/chosen": -196.24371337890625,
364
+ "logps/rejected": -213.04237365722656,
365
+ "loss": 0.6085,
366
+ "rewards/accuracies": 0.6796875,
367
+ "rewards/chosen": -0.5204161405563354,
368
+ "rewards/margins": 0.2579624056816101,
369
+ "rewards/rejected": -0.7783786058425903,
370
+ "step": 240
371
+ },
372
+ {
373
+ "epoch": 0.17043759853423665,
374
+ "grad_norm": 22.735594447037276,
375
+ "learning_rate": 9.227272727272727e-07,
376
+ "logits/chosen": 0.1225215271115303,
377
+ "logits/rejected": 0.18497014045715332,
378
+ "logps/chosen": -192.548095703125,
379
+ "logps/rejected": -207.9135284423828,
380
+ "loss": 0.6233,
381
+ "rewards/accuracies": 0.667187511920929,
382
+ "rewards/chosen": -0.5648759603500366,
383
+ "rewards/margins": 0.2269633412361145,
384
+ "rewards/rejected": -0.7918393611907959,
385
+ "step": 250
386
+ },
387
+ {
388
+ "epoch": 0.1772551024756061,
389
+ "grad_norm": 22.954060831129915,
390
+ "learning_rate": 9.151515151515152e-07,
391
+ "logits/chosen": 0.1713658571243286,
392
+ "logits/rejected": 0.25986677408218384,
393
+ "logps/chosen": -198.60391235351562,
394
+ "logps/rejected": -214.2535400390625,
395
+ "loss": 0.6071,
396
+ "rewards/accuracies": 0.6890625357627869,
397
+ "rewards/chosen": -0.6113271713256836,
398
+ "rewards/margins": 0.27784913778305054,
399
+ "rewards/rejected": -0.8891763091087341,
400
+ "step": 260
401
+ },
402
+ {
403
+ "epoch": 0.1840726064169756,
404
+ "grad_norm": 22.190753794677473,
405
+ "learning_rate": 9.075757575757576e-07,
406
+ "logits/chosen": 0.14579366147518158,
407
+ "logits/rejected": 0.20252245664596558,
408
+ "logps/chosen": -199.14405822753906,
409
+ "logps/rejected": -213.55294799804688,
410
+ "loss": 0.6017,
411
+ "rewards/accuracies": 0.6906250715255737,
412
+ "rewards/chosen": -0.5976826548576355,
413
+ "rewards/margins": 0.3174746632575989,
414
+ "rewards/rejected": -0.9151572585105896,
415
+ "step": 270
416
+ },
417
+ {
418
+ "epoch": 0.19089011035834505,
419
+ "grad_norm": 22.054250481854893,
420
+ "learning_rate": 9e-07,
421
+ "logits/chosen": 0.11682489514350891,
422
+ "logits/rejected": 0.18400567770004272,
423
+ "logps/chosen": -195.43438720703125,
424
+ "logps/rejected": -214.8118896484375,
425
+ "loss": 0.6076,
426
+ "rewards/accuracies": 0.6781250238418579,
427
+ "rewards/chosen": -0.600721001625061,
428
+ "rewards/margins": 0.29136893153190613,
429
+ "rewards/rejected": -0.89208984375,
430
+ "step": 280
431
+ },
432
+ {
433
+ "epoch": 0.1977076142997145,
434
+ "grad_norm": 24.117104152592596,
435
+ "learning_rate": 8.924242424242425e-07,
436
+ "logits/chosen": 0.08254396170377731,
437
+ "logits/rejected": 0.15450119972229004,
438
+ "logps/chosen": -202.29647827148438,
439
+ "logps/rejected": -221.5592498779297,
440
+ "loss": 0.5958,
441
+ "rewards/accuracies": 0.6859375238418579,
442
+ "rewards/chosen": -0.5900746583938599,
443
+ "rewards/margins": 0.34352385997772217,
444
+ "rewards/rejected": -0.9335983991622925,
445
+ "step": 290
446
+ },
447
+ {
448
+ "epoch": 0.204525118241084,
449
+ "grad_norm": 21.604244584329482,
450
+ "learning_rate": 8.848484848484849e-07,
451
+ "logits/chosen": 0.08819441497325897,
452
+ "logits/rejected": 0.17239636182785034,
453
+ "logps/chosen": -188.81192016601562,
454
+ "logps/rejected": -208.72073364257812,
455
+ "loss": 0.569,
456
+ "rewards/accuracies": 0.7328125238418579,
457
+ "rewards/chosen": -0.6129291653633118,
458
+ "rewards/margins": 0.4271809160709381,
459
+ "rewards/rejected": -1.0401101112365723,
460
+ "step": 300
461
+ },
462
+ {
463
+ "epoch": 0.21134262218245345,
464
+ "grad_norm": 22.020365760713695,
465
+ "learning_rate": 8.772727272727273e-07,
466
+ "logits/chosen": 0.06760307401418686,
467
+ "logits/rejected": 0.14344710111618042,
468
+ "logps/chosen": -195.82293701171875,
469
+ "logps/rejected": -212.59982299804688,
470
+ "loss": 0.5687,
471
+ "rewards/accuracies": 0.7140624523162842,
472
+ "rewards/chosen": -0.6414520740509033,
473
+ "rewards/margins": 0.4150450825691223,
474
+ "rewards/rejected": -1.0564970970153809,
475
+ "step": 310
476
+ },
477
+ {
478
+ "epoch": 0.2181601261238229,
479
+ "grad_norm": 19.161894841909444,
480
+ "learning_rate": 8.696969696969697e-07,
481
+ "logits/chosen": 0.11280106008052826,
482
+ "logits/rejected": 0.18791824579238892,
483
+ "logps/chosen": -209.43258666992188,
484
+ "logps/rejected": -236.70346069335938,
485
+ "loss": 0.5635,
486
+ "rewards/accuracies": 0.7421875,
487
+ "rewards/chosen": -0.74039626121521,
488
+ "rewards/margins": 0.5291692018508911,
489
+ "rewards/rejected": -1.269565463066101,
490
+ "step": 320
491
+ },
492
+ {
493
+ "epoch": 0.2249776300651924,
494
+ "grad_norm": 22.3343774818719,
495
+ "learning_rate": 8.62121212121212e-07,
496
+ "logits/chosen": 0.09679359942674637,
497
+ "logits/rejected": 0.19822388887405396,
498
+ "logps/chosen": -208.64476013183594,
499
+ "logps/rejected": -229.81011962890625,
500
+ "loss": 0.5596,
501
+ "rewards/accuracies": 0.7343750596046448,
502
+ "rewards/chosen": -0.7948130965232849,
503
+ "rewards/margins": 0.5681655406951904,
504
+ "rewards/rejected": -1.3629785776138306,
505
+ "step": 330
506
+ },
507
+ {
508
+ "epoch": 0.23179513400656185,
509
+ "grad_norm": 20.045843226629753,
510
+ "learning_rate": 8.545454545454544e-07,
511
+ "logits/chosen": 0.03968825936317444,
512
+ "logits/rejected": 0.12135367095470428,
513
+ "logps/chosen": -207.46609497070312,
514
+ "logps/rejected": -224.89439392089844,
515
+ "loss": 0.5679,
516
+ "rewards/accuracies": 0.7046875357627869,
517
+ "rewards/chosen": -0.7988042831420898,
518
+ "rewards/margins": 0.4717750549316406,
519
+ "rewards/rejected": -1.2705793380737305,
520
+ "step": 340
521
+ },
522
+ {
523
+ "epoch": 0.2386126379479313,
524
+ "grad_norm": 30.125557475346305,
525
+ "learning_rate": 8.469696969696968e-07,
526
+ "logits/chosen": 0.10933436453342438,
527
+ "logits/rejected": 0.1516590416431427,
528
+ "logps/chosen": -201.27694702148438,
529
+ "logps/rejected": -218.70889282226562,
530
+ "loss": 0.5859,
531
+ "rewards/accuracies": 0.6796875,
532
+ "rewards/chosen": -0.8309043645858765,
533
+ "rewards/margins": 0.49856728315353394,
534
+ "rewards/rejected": -1.3294715881347656,
535
+ "step": 350
536
+ },
537
+ {
538
+ "epoch": 0.2454301418893008,
539
+ "grad_norm": 22.893234913534002,
540
+ "learning_rate": 8.393939393939393e-07,
541
+ "logits/chosen": 0.062410831451416016,
542
+ "logits/rejected": 0.13367854058742523,
543
+ "logps/chosen": -194.7364959716797,
544
+ "logps/rejected": -215.7635040283203,
545
+ "loss": 0.5739,
546
+ "rewards/accuracies": 0.698437511920929,
547
+ "rewards/chosen": -0.7721937894821167,
548
+ "rewards/margins": 0.49258309602737427,
549
+ "rewards/rejected": -1.2647769451141357,
550
+ "step": 360
551
+ },
552
+ {
553
+ "epoch": 0.2522476458306702,
554
+ "grad_norm": 22.23618919017924,
555
+ "learning_rate": 8.318181818181817e-07,
556
+ "logits/chosen": 0.04097752273082733,
557
+ "logits/rejected": 0.11259806156158447,
558
+ "logps/chosen": -193.11825561523438,
559
+ "logps/rejected": -220.61949157714844,
560
+ "loss": 0.5378,
561
+ "rewards/accuracies": 0.7234375476837158,
562
+ "rewards/chosen": -0.7389846444129944,
563
+ "rewards/margins": 0.5330405831336975,
564
+ "rewards/rejected": -1.272025227546692,
565
+ "step": 370
566
+ },
567
+ {
568
+ "epoch": 0.2590651497720397,
569
+ "grad_norm": 21.834653461400006,
570
+ "learning_rate": 8.242424242424241e-07,
571
+ "logits/chosen": 0.05887192115187645,
572
+ "logits/rejected": 0.1328059434890747,
573
+ "logps/chosen": -202.1223602294922,
574
+ "logps/rejected": -221.07273864746094,
575
+ "loss": 0.5508,
576
+ "rewards/accuracies": 0.7296874523162842,
577
+ "rewards/chosen": -0.900775671005249,
578
+ "rewards/margins": 0.587921142578125,
579
+ "rewards/rejected": -1.488696813583374,
580
+ "step": 380
581
+ },
582
+ {
583
+ "epoch": 0.2658826537134092,
584
+ "grad_norm": 23.992274309591316,
585
+ "learning_rate": 8.166666666666666e-07,
586
+ "logits/chosen": 0.03438958153128624,
587
+ "logits/rejected": 0.12133367359638214,
588
+ "logps/chosen": -202.35487365722656,
589
+ "logps/rejected": -227.4732208251953,
590
+ "loss": 0.5618,
591
+ "rewards/accuracies": 0.715624988079071,
592
+ "rewards/chosen": -0.9070041179656982,
593
+ "rewards/margins": 0.6916414499282837,
594
+ "rewards/rejected": -1.598645567893982,
595
+ "step": 390
596
+ },
597
+ {
598
+ "epoch": 0.2727001576547786,
599
+ "grad_norm": 22.0860813112572,
600
+ "learning_rate": 8.09090909090909e-07,
601
+ "logits/chosen": 0.08829227089881897,
602
+ "logits/rejected": 0.1640704870223999,
603
+ "logps/chosen": -205.60989379882812,
604
+ "logps/rejected": -224.852294921875,
605
+ "loss": 0.5494,
606
+ "rewards/accuracies": 0.721875011920929,
607
+ "rewards/chosen": -0.9454193115234375,
608
+ "rewards/margins": 0.6362107396125793,
609
+ "rewards/rejected": -1.5816301107406616,
610
+ "step": 400
611
+ },
612
+ {
613
+ "epoch": 0.2795176615961481,
614
+ "grad_norm": 23.160379012581956,
615
+ "learning_rate": 8.015151515151514e-07,
616
+ "logits/chosen": 0.06336803734302521,
617
+ "logits/rejected": 0.13648778200149536,
618
+ "logps/chosen": -205.41561889648438,
619
+ "logps/rejected": -228.48057556152344,
620
+ "loss": 0.5462,
621
+ "rewards/accuracies": 0.7343750596046448,
622
+ "rewards/chosen": -0.9269916415214539,
623
+ "rewards/margins": 0.6442463994026184,
624
+ "rewards/rejected": -1.5712381601333618,
625
+ "step": 410
626
+ },
627
+ {
628
+ "epoch": 0.2863351655375176,
629
+ "grad_norm": 22.408557079555997,
630
+ "learning_rate": 7.939393939393939e-07,
631
+ "logits/chosen": 0.08505380898714066,
632
+ "logits/rejected": 0.18310996890068054,
633
+ "logps/chosen": -213.0338897705078,
634
+ "logps/rejected": -235.76596069335938,
635
+ "loss": 0.5532,
636
+ "rewards/accuracies": 0.7265625,
637
+ "rewards/chosen": -1.0196318626403809,
638
+ "rewards/margins": 0.5743885040283203,
639
+ "rewards/rejected": -1.5940203666687012,
640
+ "step": 420
641
+ },
642
+ {
643
+ "epoch": 0.293152669478887,
644
+ "grad_norm": 25.16391126214167,
645
+ "learning_rate": 7.863636363636363e-07,
646
+ "logits/chosen": 0.10937841981649399,
647
+ "logits/rejected": 0.16763341426849365,
648
+ "logps/chosen": -207.480224609375,
649
+ "logps/rejected": -234.79544067382812,
650
+ "loss": 0.5562,
651
+ "rewards/accuracies": 0.703125,
652
+ "rewards/chosen": -0.9945791363716125,
653
+ "rewards/margins": 0.7221311330795288,
654
+ "rewards/rejected": -1.7167102098464966,
655
+ "step": 430
656
+ },
657
+ {
658
+ "epoch": 0.2999701734202565,
659
+ "grad_norm": 29.733679778009286,
660
+ "learning_rate": 7.787878787878787e-07,
661
+ "logits/chosen": 0.09990985691547394,
662
+ "logits/rejected": 0.19938966631889343,
663
+ "logps/chosen": -207.3507843017578,
664
+ "logps/rejected": -230.52630615234375,
665
+ "loss": 0.5384,
666
+ "rewards/accuracies": 0.75,
667
+ "rewards/chosen": -0.9231570959091187,
668
+ "rewards/margins": 0.6046810746192932,
669
+ "rewards/rejected": -1.527838110923767,
670
+ "step": 440
671
+ },
672
+ {
673
+ "epoch": 0.306787677361626,
674
+ "grad_norm": 19.72755564195352,
675
+ "learning_rate": 7.712121212121212e-07,
676
+ "logits/chosen": 0.14025147259235382,
677
+ "logits/rejected": 0.1931421309709549,
678
+ "logps/chosen": -211.64739990234375,
679
+ "logps/rejected": -234.59742736816406,
680
+ "loss": 0.539,
681
+ "rewards/accuracies": 0.734375,
682
+ "rewards/chosen": -0.9828760623931885,
683
+ "rewards/margins": 0.6804162263870239,
684
+ "rewards/rejected": -1.6632922887802124,
685
+ "step": 450
686
+ },
687
+ {
688
+ "epoch": 0.3136051813029954,
689
+ "grad_norm": 23.978166468246467,
690
+ "learning_rate": 7.636363636363636e-07,
691
+ "logits/chosen": 0.0902441218495369,
692
+ "logits/rejected": 0.18330176174640656,
693
+ "logps/chosen": -212.70648193359375,
694
+ "logps/rejected": -237.41574096679688,
695
+ "loss": 0.5309,
696
+ "rewards/accuracies": 0.7468750476837158,
697
+ "rewards/chosen": -1.0690429210662842,
698
+ "rewards/margins": 0.6713231801986694,
699
+ "rewards/rejected": -1.740365982055664,
700
+ "step": 460
701
+ },
702
+ {
703
+ "epoch": 0.3204226852443649,
704
+ "grad_norm": 26.207432691612087,
705
+ "learning_rate": 7.56060606060606e-07,
706
+ "logits/chosen": 0.11341211199760437,
707
+ "logits/rejected": 0.1867765188217163,
708
+ "logps/chosen": -197.91021728515625,
709
+ "logps/rejected": -221.50146484375,
710
+ "loss": 0.5343,
711
+ "rewards/accuracies": 0.7562500238418579,
712
+ "rewards/chosen": -0.9720097780227661,
713
+ "rewards/margins": 0.6199135780334473,
714
+ "rewards/rejected": -1.5919233560562134,
715
+ "step": 470
716
+ },
717
+ {
718
+ "epoch": 0.3272401891857344,
719
+ "grad_norm": 21.846418443949897,
720
+ "learning_rate": 7.484848484848485e-07,
721
+ "logits/chosen": 0.11702318489551544,
722
+ "logits/rejected": 0.203329399228096,
723
+ "logps/chosen": -201.898193359375,
724
+ "logps/rejected": -221.0704803466797,
725
+ "loss": 0.5479,
726
+ "rewards/accuracies": 0.7437500357627869,
727
+ "rewards/chosen": -0.9688056111335754,
728
+ "rewards/margins": 0.7794600129127502,
729
+ "rewards/rejected": -1.7482655048370361,
730
+ "step": 480
731
+ },
732
+ {
733
+ "epoch": 0.3340576931271038,
734
+ "grad_norm": 23.690713241273283,
735
+ "learning_rate": 7.409090909090909e-07,
736
+ "logits/chosen": 0.10431469976902008,
737
+ "logits/rejected": 0.20410987734794617,
738
+ "logps/chosen": -212.93824768066406,
739
+ "logps/rejected": -240.54246520996094,
740
+ "loss": 0.5022,
741
+ "rewards/accuracies": 0.7562500238418579,
742
+ "rewards/chosen": -0.9758983850479126,
743
+ "rewards/margins": 0.8021982312202454,
744
+ "rewards/rejected": -1.7780965566635132,
745
+ "step": 490
746
+ },
747
+ {
748
+ "epoch": 0.3408751970684733,
749
+ "grad_norm": 23.75624324955974,
750
+ "learning_rate": 7.333333333333332e-07,
751
+ "logits/chosen": 0.0590752549469471,
752
+ "logits/rejected": 0.15686756372451782,
753
+ "logps/chosen": -204.53347778320312,
754
+ "logps/rejected": -232.50863647460938,
755
+ "loss": 0.4879,
756
+ "rewards/accuracies": 0.7875000238418579,
757
+ "rewards/chosen": -0.9903222322463989,
758
+ "rewards/margins": 0.8048741817474365,
759
+ "rewards/rejected": -1.7951964139938354,
760
+ "step": 500
761
+ },
762
+ {
763
+ "epoch": 0.3476927010098428,
764
+ "grad_norm": 24.670166077373448,
765
+ "learning_rate": 7.257575757575756e-07,
766
+ "logits/chosen": 0.04646120220422745,
767
+ "logits/rejected": 0.1389884203672409,
768
+ "logps/chosen": -216.92431640625,
769
+ "logps/rejected": -242.18240356445312,
770
+ "loss": 0.5303,
771
+ "rewards/accuracies": 0.7312500476837158,
772
+ "rewards/chosen": -1.1649525165557861,
773
+ "rewards/margins": 0.7863146066665649,
774
+ "rewards/rejected": -1.9512672424316406,
775
+ "step": 510
776
+ },
777
+ {
778
+ "epoch": 0.3545102049512122,
779
+ "grad_norm": 23.363016624074675,
780
+ "learning_rate": 7.181818181818181e-07,
781
+ "logits/chosen": -0.012276587076485157,
782
+ "logits/rejected": 0.07009466737508774,
783
+ "logps/chosen": -205.48934936523438,
784
+ "logps/rejected": -232.42538452148438,
785
+ "loss": 0.5015,
786
+ "rewards/accuracies": 0.7671874761581421,
787
+ "rewards/chosen": -1.0472114086151123,
788
+ "rewards/margins": 0.788646399974823,
789
+ "rewards/rejected": -1.83585786819458,
790
+ "step": 520
791
+ },
792
+ {
793
+ "epoch": 0.3613277088925817,
794
+ "grad_norm": 25.288611630037565,
795
+ "learning_rate": 7.106060606060605e-07,
796
+ "logits/chosen": -0.012474373914301395,
797
+ "logits/rejected": 0.05413222685456276,
798
+ "logps/chosen": -208.1520538330078,
799
+ "logps/rejected": -241.48385620117188,
800
+ "loss": 0.5252,
801
+ "rewards/accuracies": 0.746874988079071,
802
+ "rewards/chosen": -1.1651169061660767,
803
+ "rewards/margins": 1.0477391481399536,
804
+ "rewards/rejected": -2.2128560543060303,
805
+ "step": 530
806
+ },
807
+ {
808
+ "epoch": 0.3681452128339512,
809
+ "grad_norm": 20.178014539662176,
810
+ "learning_rate": 7.030303030303029e-07,
811
+ "logits/chosen": -0.04357679560780525,
812
+ "logits/rejected": 0.0625062957406044,
813
+ "logps/chosen": -210.49520874023438,
814
+ "logps/rejected": -241.66493225097656,
815
+ "loss": 0.4807,
816
+ "rewards/accuracies": 0.793749988079071,
817
+ "rewards/chosen": -1.1151983737945557,
818
+ "rewards/margins": 0.9497561454772949,
819
+ "rewards/rejected": -2.0649547576904297,
820
+ "step": 540
821
+ },
822
+ {
823
+ "epoch": 0.3749627167753206,
824
+ "grad_norm": 21.944400967201336,
825
+ "learning_rate": 6.954545454545454e-07,
826
+ "logits/chosen": 0.004060904495418072,
827
+ "logits/rejected": 0.08822981268167496,
828
+ "logps/chosen": -207.4363250732422,
829
+ "logps/rejected": -234.25550842285156,
830
+ "loss": 0.501,
831
+ "rewards/accuracies": 0.765625,
832
+ "rewards/chosen": -1.2111350297927856,
833
+ "rewards/margins": 0.8183422684669495,
834
+ "rewards/rejected": -2.02947735786438,
835
+ "step": 550
836
+ },
837
+ {
838
+ "epoch": 0.3817802207166901,
839
+ "grad_norm": 26.850310787153724,
840
+ "learning_rate": 6.878787878787878e-07,
841
+ "logits/chosen": -0.017100585624575615,
842
+ "logits/rejected": 0.07575605064630508,
843
+ "logps/chosen": -213.30137634277344,
844
+ "logps/rejected": -243.35186767578125,
845
+ "loss": 0.4941,
846
+ "rewards/accuracies": 0.7593750357627869,
847
+ "rewards/chosen": -1.3406263589859009,
848
+ "rewards/margins": 0.9297415018081665,
849
+ "rewards/rejected": -2.2703678607940674,
850
+ "step": 560
851
+ },
852
+ {
853
+ "epoch": 0.3885977246580596,
854
+ "grad_norm": 25.982278946370197,
855
+ "learning_rate": 6.803030303030302e-07,
856
+ "logits/chosen": 0.019733965396881104,
857
+ "logits/rejected": 0.07434576749801636,
858
+ "logps/chosen": -220.54368591308594,
859
+ "logps/rejected": -245.9012451171875,
860
+ "loss": 0.5234,
861
+ "rewards/accuracies": 0.7406250238418579,
862
+ "rewards/chosen": -1.3345239162445068,
863
+ "rewards/margins": 0.882716953754425,
864
+ "rewards/rejected": -2.217240810394287,
865
+ "step": 570
866
+ },
867
+ {
868
+ "epoch": 0.395415228599429,
869
+ "grad_norm": 30.324292712841515,
870
+ "learning_rate": 6.727272727272727e-07,
871
+ "logits/chosen": -0.07648750394582748,
872
+ "logits/rejected": 0.013701358810067177,
873
+ "logps/chosen": -215.10986328125,
874
+ "logps/rejected": -241.52174377441406,
875
+ "loss": 0.4888,
876
+ "rewards/accuracies": 0.785937488079071,
877
+ "rewards/chosen": -1.2312657833099365,
878
+ "rewards/margins": 0.8834339380264282,
879
+ "rewards/rejected": -2.1146998405456543,
880
+ "step": 580
881
+ },
882
+ {
883
+ "epoch": 0.4022327325407985,
884
+ "grad_norm": 30.267248385209463,
885
+ "learning_rate": 6.651515151515151e-07,
886
+ "logits/chosen": -0.08248546719551086,
887
+ "logits/rejected": 0.012668056413531303,
888
+ "logps/chosen": -217.48597717285156,
889
+ "logps/rejected": -248.1539306640625,
890
+ "loss": 0.4718,
891
+ "rewards/accuracies": 0.770312488079071,
892
+ "rewards/chosen": -1.3211320638656616,
893
+ "rewards/margins": 1.034571886062622,
894
+ "rewards/rejected": -2.3557040691375732,
895
+ "step": 590
896
+ },
897
+ {
898
+ "epoch": 0.409050236482168,
899
+ "grad_norm": 33.7927835163272,
900
+ "learning_rate": 6.575757575757575e-07,
901
+ "logits/chosen": -0.10192164778709412,
902
+ "logits/rejected": -0.015221836045384407,
903
+ "logps/chosen": -217.5685577392578,
904
+ "logps/rejected": -255.0595703125,
905
+ "loss": 0.503,
906
+ "rewards/accuracies": 0.75,
907
+ "rewards/chosen": -1.4013419151306152,
908
+ "rewards/margins": 1.1709883213043213,
909
+ "rewards/rejected": -2.5723299980163574,
910
+ "step": 600
911
+ },
912
+ {
913
+ "epoch": 0.4158677404235374,
914
+ "grad_norm": 26.97423161614756,
915
+ "learning_rate": 6.5e-07,
916
+ "logits/chosen": -0.09173352271318436,
917
+ "logits/rejected": -0.004754798021167517,
918
+ "logps/chosen": -214.8068084716797,
919
+ "logps/rejected": -247.6568145751953,
920
+ "loss": 0.4939,
921
+ "rewards/accuracies": 0.753125011920929,
922
+ "rewards/chosen": -1.280412197113037,
923
+ "rewards/margins": 1.0068598985671997,
924
+ "rewards/rejected": -2.2872722148895264,
925
+ "step": 610
926
+ },
927
+ {
928
+ "epoch": 0.4226852443649069,
929
+ "grad_norm": 23.353042033965973,
930
+ "learning_rate": 6.424242424242424e-07,
931
+ "logits/chosen": -0.08759984374046326,
932
+ "logits/rejected": -0.006561200134456158,
933
+ "logps/chosen": -209.99142456054688,
934
+ "logps/rejected": -240.59873962402344,
935
+ "loss": 0.5032,
936
+ "rewards/accuracies": 0.7640625238418579,
937
+ "rewards/chosen": -1.327502965927124,
938
+ "rewards/margins": 0.8928775191307068,
939
+ "rewards/rejected": -2.2203807830810547,
940
+ "step": 620
941
+ },
942
+ {
943
+ "epoch": 0.4295027483062764,
944
+ "grad_norm": 19.622319808534836,
945
+ "learning_rate": 6.348484848484848e-07,
946
+ "logits/chosen": -0.072292260825634,
947
+ "logits/rejected": 0.022778620943427086,
948
+ "logps/chosen": -208.22052001953125,
949
+ "logps/rejected": -243.78250122070312,
950
+ "loss": 0.4516,
951
+ "rewards/accuracies": 0.7953125238418579,
952
+ "rewards/chosen": -1.2082226276397705,
953
+ "rewards/margins": 1.1600842475891113,
954
+ "rewards/rejected": -2.368306875228882,
955
+ "step": 630
956
+ },
957
+ {
958
+ "epoch": 0.4363202522476458,
959
+ "grad_norm": 27.54001459206905,
960
+ "learning_rate": 6.272727272727273e-07,
961
+ "logits/chosen": -0.07350125908851624,
962
+ "logits/rejected": 0.0216854028403759,
963
+ "logps/chosen": -214.49398803710938,
964
+ "logps/rejected": -245.44357299804688,
965
+ "loss": 0.481,
966
+ "rewards/accuracies": 0.7671875357627869,
967
+ "rewards/chosen": -1.3779951333999634,
968
+ "rewards/margins": 1.1042989492416382,
969
+ "rewards/rejected": -2.4822940826416016,
970
+ "step": 640
971
+ },
972
+ {
973
+ "epoch": 0.4431377561890153,
974
+ "grad_norm": 22.005523605032646,
975
+ "learning_rate": 6.196969696969697e-07,
976
+ "logits/chosen": -0.08112622797489166,
977
+ "logits/rejected": 0.016506649553775787,
978
+ "logps/chosen": -207.86119079589844,
979
+ "logps/rejected": -243.9912109375,
980
+ "loss": 0.4669,
981
+ "rewards/accuracies": 0.776562511920929,
982
+ "rewards/chosen": -1.312855839729309,
983
+ "rewards/margins": 1.1270496845245361,
984
+ "rewards/rejected": -2.4399054050445557,
985
+ "step": 650
986
+ },
987
+ {
988
+ "epoch": 0.4499552601303848,
989
+ "grad_norm": 22.786232365472497,
990
+ "learning_rate": 6.12121212121212e-07,
991
+ "logits/chosen": -0.014887440949678421,
992
+ "logits/rejected": 0.07256890088319778,
993
+ "logps/chosen": -214.85989379882812,
994
+ "logps/rejected": -242.47958374023438,
995
+ "loss": 0.5139,
996
+ "rewards/accuracies": 0.7328125238418579,
997
+ "rewards/chosen": -1.3719854354858398,
998
+ "rewards/margins": 0.9033377766609192,
999
+ "rewards/rejected": -2.2753231525421143,
1000
+ "step": 660
1001
+ },
1002
+ {
1003
+ "epoch": 0.4567727640717542,
1004
+ "grad_norm": 22.124194779218676,
1005
+ "learning_rate": 6.045454545454545e-07,
1006
+ "logits/chosen": -0.06873725354671478,
1007
+ "logits/rejected": 0.01612996682524681,
1008
+ "logps/chosen": -214.2239990234375,
1009
+ "logps/rejected": -245.57972717285156,
1010
+ "loss": 0.4842,
1011
+ "rewards/accuracies": 0.7515625357627869,
1012
+ "rewards/chosen": -1.374406099319458,
1013
+ "rewards/margins": 1.0592212677001953,
1014
+ "rewards/rejected": -2.4336276054382324,
1015
+ "step": 670
1016
+ },
1017
+ {
1018
+ "epoch": 0.4635902680131237,
1019
+ "grad_norm": 25.208264673147376,
1020
+ "learning_rate": 5.969696969696969e-07,
1021
+ "logits/chosen": -0.08258620649576187,
1022
+ "logits/rejected": 0.019038595259189606,
1023
+ "logps/chosen": -212.36058044433594,
1024
+ "logps/rejected": -247.52285766601562,
1025
+ "loss": 0.4707,
1026
+ "rewards/accuracies": 0.7796874642372131,
1027
+ "rewards/chosen": -1.355445384979248,
1028
+ "rewards/margins": 1.0607233047485352,
1029
+ "rewards/rejected": -2.416168689727783,
1030
+ "step": 680
1031
+ },
1032
+ {
1033
+ "epoch": 0.4704077719544932,
1034
+ "grad_norm": 23.492565569306137,
1035
+ "learning_rate": 5.893939393939393e-07,
1036
+ "logits/chosen": -0.048173777759075165,
1037
+ "logits/rejected": 0.05654379725456238,
1038
+ "logps/chosen": -203.7908935546875,
1039
+ "logps/rejected": -235.70806884765625,
1040
+ "loss": 0.463,
1041
+ "rewards/accuracies": 0.7640625238418579,
1042
+ "rewards/chosen": -1.3086433410644531,
1043
+ "rewards/margins": 1.073492407798767,
1044
+ "rewards/rejected": -2.3821358680725098,
1045
+ "step": 690
1046
+ },
1047
+ {
1048
+ "epoch": 0.4772252758958626,
1049
+ "grad_norm": 23.525844296596986,
1050
+ "learning_rate": 5.818181818181818e-07,
1051
+ "logits/chosen": -0.06435231864452362,
1052
+ "logits/rejected": 0.036002036184072495,
1053
+ "logps/chosen": -215.8461456298828,
1054
+ "logps/rejected": -253.51507568359375,
1055
+ "loss": 0.4424,
1056
+ "rewards/accuracies": 0.7937500476837158,
1057
+ "rewards/chosen": -1.304274320602417,
1058
+ "rewards/margins": 1.1241943836212158,
1059
+ "rewards/rejected": -2.428468704223633,
1060
+ "step": 700
1061
+ },
1062
+ {
1063
+ "epoch": 0.4840427798372321,
1064
+ "grad_norm": 24.839200055423007,
1065
+ "learning_rate": 5.742424242424242e-07,
1066
+ "logits/chosen": -0.1140328049659729,
1067
+ "logits/rejected": -0.0182164516299963,
1068
+ "logps/chosen": -219.0977020263672,
1069
+ "logps/rejected": -245.15292358398438,
1070
+ "loss": 0.4667,
1071
+ "rewards/accuracies": 0.7796875238418579,
1072
+ "rewards/chosen": -1.4004441499710083,
1073
+ "rewards/margins": 1.1077167987823486,
1074
+ "rewards/rejected": -2.5081608295440674,
1075
+ "step": 710
1076
+ },
1077
+ {
1078
+ "epoch": 0.4908602837786016,
1079
+ "grad_norm": 21.081539741219093,
1080
+ "learning_rate": 5.666666666666666e-07,
1081
+ "logits/chosen": -0.12187488377094269,
1082
+ "logits/rejected": -0.013158449903130531,
1083
+ "logps/chosen": -209.8751220703125,
1084
+ "logps/rejected": -246.4436492919922,
1085
+ "loss": 0.4639,
1086
+ "rewards/accuracies": 0.784375011920929,
1087
+ "rewards/chosen": -1.422644853591919,
1088
+ "rewards/margins": 1.1023151874542236,
1089
+ "rewards/rejected": -2.5249602794647217,
1090
+ "step": 720
1091
+ },
1092
+ {
1093
+ "epoch": 0.497677787719971,
1094
+ "grad_norm": 24.782821613085822,
1095
+ "learning_rate": 5.590909090909091e-07,
1096
+ "logits/chosen": -0.06877341866493225,
1097
+ "logits/rejected": 0.035405777394771576,
1098
+ "logps/chosen": -216.97332763671875,
1099
+ "logps/rejected": -256.8809814453125,
1100
+ "loss": 0.4337,
1101
+ "rewards/accuracies": 0.8078125715255737,
1102
+ "rewards/chosen": -1.4306436777114868,
1103
+ "rewards/margins": 1.1720441579818726,
1104
+ "rewards/rejected": -2.6026878356933594,
1105
+ "step": 730
1106
+ },
1107
+ {
1108
+ "epoch": 0.5044952916613404,
1109
+ "grad_norm": 24.588552599891635,
1110
+ "learning_rate": 5.515151515151515e-07,
1111
+ "logits/chosen": -0.07467488199472427,
1112
+ "logits/rejected": 0.017803018912672997,
1113
+ "logps/chosen": -230.01266479492188,
1114
+ "logps/rejected": -262.2432861328125,
1115
+ "loss": 0.4669,
1116
+ "rewards/accuracies": 0.762499988079071,
1117
+ "rewards/chosen": -1.632286548614502,
1118
+ "rewards/margins": 1.2193667888641357,
1119
+ "rewards/rejected": -2.8516533374786377,
1120
+ "step": 740
1121
+ },
1122
+ {
1123
+ "epoch": 0.5113127956027099,
1124
+ "grad_norm": 20.475854944533108,
1125
+ "learning_rate": 5.439393939393939e-07,
1126
+ "logits/chosen": -0.07244399189949036,
1127
+ "logits/rejected": 0.0048616742715239525,
1128
+ "logps/chosen": -218.39337158203125,
1129
+ "logps/rejected": -250.11416625976562,
1130
+ "loss": 0.4467,
1131
+ "rewards/accuracies": 0.78125,
1132
+ "rewards/chosen": -1.5096694231033325,
1133
+ "rewards/margins": 1.196234107017517,
1134
+ "rewards/rejected": -2.7059032917022705,
1135
+ "step": 750
1136
+ },
1137
+ {
1138
+ "epoch": 0.5181302995440794,
1139
+ "grad_norm": 24.853405341139933,
1140
+ "learning_rate": 5.363636363636363e-07,
1141
+ "logits/chosen": -0.0735924020409584,
1142
+ "logits/rejected": 0.022622695192694664,
1143
+ "logps/chosen": -216.3765411376953,
1144
+ "logps/rejected": -250.67161560058594,
1145
+ "loss": 0.4594,
1146
+ "rewards/accuracies": 0.796875,
1147
+ "rewards/chosen": -1.5826576948165894,
1148
+ "rewards/margins": 1.228342890739441,
1149
+ "rewards/rejected": -2.8110008239746094,
1150
+ "step": 760
1151
+ },
1152
+ {
1153
+ "epoch": 0.5249478034854489,
1154
+ "grad_norm": 20.713474784942502,
1155
+ "learning_rate": 5.287878787878788e-07,
1156
+ "logits/chosen": -0.07298550754785538,
1157
+ "logits/rejected": 0.015953145921230316,
1158
+ "logps/chosen": -213.88311767578125,
1159
+ "logps/rejected": -254.41146850585938,
1160
+ "loss": 0.4376,
1161
+ "rewards/accuracies": 0.8312500715255737,
1162
+ "rewards/chosen": -1.4484457969665527,
1163
+ "rewards/margins": 1.3493634462356567,
1164
+ "rewards/rejected": -2.797809362411499,
1165
+ "step": 770
1166
+ },
1167
+ {
1168
+ "epoch": 0.5317653074268184,
1169
+ "grad_norm": 25.104952077400377,
1170
+ "learning_rate": 5.212121212121212e-07,
1171
+ "logits/chosen": -0.040183987468481064,
1172
+ "logits/rejected": 0.03515133634209633,
1173
+ "logps/chosen": -217.26239013671875,
1174
+ "logps/rejected": -248.25259399414062,
1175
+ "loss": 0.4695,
1176
+ "rewards/accuracies": 0.7578125,
1177
+ "rewards/chosen": -1.5951099395751953,
1178
+ "rewards/margins": 1.2722889184951782,
1179
+ "rewards/rejected": -2.867398738861084,
1180
+ "step": 780
1181
+ },
1182
+ {
1183
+ "epoch": 0.5385828113681879,
1184
+ "grad_norm": 28.29480749737075,
1185
+ "learning_rate": 5.136363636363636e-07,
1186
+ "logits/chosen": -0.04418431594967842,
1187
+ "logits/rejected": 0.05417613312602043,
1188
+ "logps/chosen": -222.26820373535156,
1189
+ "logps/rejected": -257.3962707519531,
1190
+ "loss": 0.4579,
1191
+ "rewards/accuracies": 0.7906250357627869,
1192
+ "rewards/chosen": -1.570731520652771,
1193
+ "rewards/margins": 1.1469626426696777,
1194
+ "rewards/rejected": -2.717694044113159,
1195
+ "step": 790
1196
+ },
1197
+ {
1198
+ "epoch": 0.5454003153095572,
1199
+ "grad_norm": 23.21073310542596,
1200
+ "learning_rate": 5.060606060606061e-07,
1201
+ "logits/chosen": -0.07153814285993576,
1202
+ "logits/rejected": 0.021596048027276993,
1203
+ "logps/chosen": -217.89630126953125,
1204
+ "logps/rejected": -254.9208221435547,
1205
+ "loss": 0.4123,
1206
+ "rewards/accuracies": 0.8250000476837158,
1207
+ "rewards/chosen": -1.5101759433746338,
1208
+ "rewards/margins": 1.3654392957687378,
1209
+ "rewards/rejected": -2.8756155967712402,
1210
+ "step": 800
1211
+ },
1212
+ {
1213
+ "epoch": 0.5522178192509267,
1214
+ "grad_norm": 21.00129246925032,
1215
+ "learning_rate": 4.984848484848485e-07,
1216
+ "logits/chosen": -0.06583255529403687,
1217
+ "logits/rejected": 0.04746149852871895,
1218
+ "logps/chosen": -214.39453125,
1219
+ "logps/rejected": -253.67242431640625,
1220
+ "loss": 0.4335,
1221
+ "rewards/accuracies": 0.785937488079071,
1222
+ "rewards/chosen": -1.6045914888381958,
1223
+ "rewards/margins": 1.34579598903656,
1224
+ "rewards/rejected": -2.950387477874756,
1225
+ "step": 810
1226
+ },
1227
+ {
1228
+ "epoch": 0.5590353231922962,
1229
+ "grad_norm": 25.238378218924026,
1230
+ "learning_rate": 4.909090909090909e-07,
1231
+ "logits/chosen": -0.06921117007732391,
1232
+ "logits/rejected": 0.03174077346920967,
1233
+ "logps/chosen": -213.57254028320312,
1234
+ "logps/rejected": -248.80799865722656,
1235
+ "loss": 0.4561,
1236
+ "rewards/accuracies": 0.7875000238418579,
1237
+ "rewards/chosen": -1.6258023977279663,
1238
+ "rewards/margins": 1.308131217956543,
1239
+ "rewards/rejected": -2.933933734893799,
1240
+ "step": 820
1241
+ },
1242
+ {
1243
+ "epoch": 0.5658528271336657,
1244
+ "grad_norm": 22.691646670626245,
1245
+ "learning_rate": 4.833333333333333e-07,
1246
+ "logits/chosen": -0.08812057971954346,
1247
+ "logits/rejected": 0.028292154893279076,
1248
+ "logps/chosen": -217.73495483398438,
1249
+ "logps/rejected": -258.3476867675781,
1250
+ "loss": 0.4395,
1251
+ "rewards/accuracies": 0.8062499761581421,
1252
+ "rewards/chosen": -1.6993637084960938,
1253
+ "rewards/margins": 1.3441307544708252,
1254
+ "rewards/rejected": -3.043494462966919,
1255
+ "step": 830
1256
+ },
1257
+ {
1258
+ "epoch": 0.5726703310750352,
1259
+ "grad_norm": 26.78354678353827,
1260
+ "learning_rate": 4.7575757575757574e-07,
1261
+ "logits/chosen": -0.08428293466567993,
1262
+ "logits/rejected": 0.0038617942482233047,
1263
+ "logps/chosen": -228.9470977783203,
1264
+ "logps/rejected": -262.1487121582031,
1265
+ "loss": 0.4585,
1266
+ "rewards/accuracies": 0.7874999642372131,
1267
+ "rewards/chosen": -1.7086464166641235,
1268
+ "rewards/margins": 1.2672871351242065,
1269
+ "rewards/rejected": -2.97593355178833,
1270
+ "step": 840
1271
+ },
1272
+ {
1273
+ "epoch": 0.5794878350164047,
1274
+ "grad_norm": 21.82947420474725,
1275
+ "learning_rate": 4.681818181818182e-07,
1276
+ "logits/chosen": -0.07127973437309265,
1277
+ "logits/rejected": 0.03848648816347122,
1278
+ "logps/chosen": -214.94436645507812,
1279
+ "logps/rejected": -252.94378662109375,
1280
+ "loss": 0.4487,
1281
+ "rewards/accuracies": 0.785937488079071,
1282
+ "rewards/chosen": -1.5856503248214722,
1283
+ "rewards/margins": 1.2746567726135254,
1284
+ "rewards/rejected": -2.860306978225708,
1285
+ "step": 850
1286
+ },
1287
+ {
1288
+ "epoch": 0.586305338957774,
1289
+ "grad_norm": 25.33636086854273,
1290
+ "learning_rate": 4.606060606060606e-07,
1291
+ "logits/chosen": -0.0755903422832489,
1292
+ "logits/rejected": 0.007846422493457794,
1293
+ "logps/chosen": -229.74365234375,
1294
+ "logps/rejected": -266.0998229980469,
1295
+ "loss": 0.4466,
1296
+ "rewards/accuracies": 0.8109375238418579,
1297
+ "rewards/chosen": -1.6116007566452026,
1298
+ "rewards/margins": 1.3451875448226929,
1299
+ "rewards/rejected": -2.9567883014678955,
1300
+ "step": 860
1301
+ },
1302
+ {
1303
+ "epoch": 0.5931228428991435,
1304
+ "grad_norm": 23.430791371783034,
1305
+ "learning_rate": 4.53030303030303e-07,
1306
+ "logits/chosen": -0.08032269030809402,
1307
+ "logits/rejected": 0.013129429891705513,
1308
+ "logps/chosen": -211.66123962402344,
1309
+ "logps/rejected": -249.7532501220703,
1310
+ "loss": 0.4168,
1311
+ "rewards/accuracies": 0.8296875357627869,
1312
+ "rewards/chosen": -1.4253365993499756,
1313
+ "rewards/margins": 1.4885873794555664,
1314
+ "rewards/rejected": -2.913924217224121,
1315
+ "step": 870
1316
+ },
1317
+ {
1318
+ "epoch": 0.599940346840513,
1319
+ "grad_norm": 25.6792868240925,
1320
+ "learning_rate": 4.4545454545454544e-07,
1321
+ "logits/chosen": -0.10410317778587341,
1322
+ "logits/rejected": -0.02125217206776142,
1323
+ "logps/chosen": -211.6175994873047,
1324
+ "logps/rejected": -250.02481079101562,
1325
+ "loss": 0.4419,
1326
+ "rewards/accuracies": 0.776562511920929,
1327
+ "rewards/chosen": -1.4521470069885254,
1328
+ "rewards/margins": 1.4656074047088623,
1329
+ "rewards/rejected": -2.9177544116973877,
1330
+ "step": 880
1331
+ },
1332
+ {
1333
+ "epoch": 0.6067578507818825,
1334
+ "grad_norm": 24.336985726252383,
1335
+ "learning_rate": 4.3787878787878784e-07,
1336
+ "logits/chosen": -0.1017291247844696,
1337
+ "logits/rejected": -0.006951052229851484,
1338
+ "logps/chosen": -207.91339111328125,
1339
+ "logps/rejected": -244.61329650878906,
1340
+ "loss": 0.4338,
1341
+ "rewards/accuracies": 0.7953125238418579,
1342
+ "rewards/chosen": -1.3715894222259521,
1343
+ "rewards/margins": 1.3720262050628662,
1344
+ "rewards/rejected": -2.7436156272888184,
1345
+ "step": 890
1346
+ },
1347
+ {
1348
+ "epoch": 0.613575354723252,
1349
+ "grad_norm": 28.717619953557637,
1350
+ "learning_rate": 4.303030303030303e-07,
1351
+ "logits/chosen": -0.12957513332366943,
1352
+ "logits/rejected": -0.04137944057583809,
1353
+ "logps/chosen": -212.1148223876953,
1354
+ "logps/rejected": -250.58657836914062,
1355
+ "loss": 0.4428,
1356
+ "rewards/accuracies": 0.8062500357627869,
1357
+ "rewards/chosen": -1.5294814109802246,
1358
+ "rewards/margins": 1.3217490911483765,
1359
+ "rewards/rejected": -2.8512306213378906,
1360
+ "step": 900
1361
+ },
1362
+ {
1363
+ "epoch": 0.6203928586646215,
1364
+ "grad_norm": 24.64484800585095,
1365
+ "learning_rate": 4.227272727272727e-07,
1366
+ "logits/chosen": -0.1173202320933342,
1367
+ "logits/rejected": -0.026215719059109688,
1368
+ "logps/chosen": -219.25180053710938,
1369
+ "logps/rejected": -253.3756561279297,
1370
+ "loss": 0.4444,
1371
+ "rewards/accuracies": 0.7890625,
1372
+ "rewards/chosen": -1.5156140327453613,
1373
+ "rewards/margins": 1.2724617719650269,
1374
+ "rewards/rejected": -2.7880756855010986,
1375
+ "step": 910
1376
+ },
1377
+ {
1378
+ "epoch": 0.6272103626059908,
1379
+ "grad_norm": 21.577769353125333,
1380
+ "learning_rate": 4.1515151515151513e-07,
1381
+ "logits/chosen": -0.09302366524934769,
1382
+ "logits/rejected": -0.017984673380851746,
1383
+ "logps/chosen": -225.01498413085938,
1384
+ "logps/rejected": -260.4620666503906,
1385
+ "loss": 0.4204,
1386
+ "rewards/accuracies": 0.8125,
1387
+ "rewards/chosen": -1.5388312339782715,
1388
+ "rewards/margins": 1.3805811405181885,
1389
+ "rewards/rejected": -2.91941237449646,
1390
+ "step": 920
1391
+ },
1392
+ {
1393
+ "epoch": 0.6340278665473603,
1394
+ "grad_norm": 21.851016424850197,
1395
+ "learning_rate": 4.075757575757576e-07,
1396
+ "logits/chosen": -0.12906233966350555,
1397
+ "logits/rejected": -0.042714815586805344,
1398
+ "logps/chosen": -224.21875,
1399
+ "logps/rejected": -261.9176940917969,
1400
+ "loss": 0.4137,
1401
+ "rewards/accuracies": 0.8343750238418579,
1402
+ "rewards/chosen": -1.6001354455947876,
1403
+ "rewards/margins": 1.3610618114471436,
1404
+ "rewards/rejected": -2.9611973762512207,
1405
+ "step": 930
1406
+ },
1407
+ {
1408
+ "epoch": 0.6408453704887298,
1409
+ "grad_norm": 28.106139021916903,
1410
+ "learning_rate": 4e-07,
1411
+ "logits/chosen": -0.17769670486450195,
1412
+ "logits/rejected": -0.07489217072725296,
1413
+ "logps/chosen": -215.14065551757812,
1414
+ "logps/rejected": -260.8359680175781,
1415
+ "loss": 0.4011,
1416
+ "rewards/accuracies": 0.8265625238418579,
1417
+ "rewards/chosen": -1.5202898979187012,
1418
+ "rewards/margins": 1.60263991355896,
1419
+ "rewards/rejected": -3.122929811477661,
1420
+ "step": 940
1421
+ },
1422
+ {
1423
+ "epoch": 0.6476628744300993,
1424
+ "grad_norm": 26.85542161068887,
1425
+ "learning_rate": 3.924242424242424e-07,
1426
+ "logits/chosen": -0.14841988682746887,
1427
+ "logits/rejected": -0.06951985508203506,
1428
+ "logps/chosen": -207.8277130126953,
1429
+ "logps/rejected": -250.3575439453125,
1430
+ "loss": 0.4295,
1431
+ "rewards/accuracies": 0.796875,
1432
+ "rewards/chosen": -1.5621274709701538,
1433
+ "rewards/margins": 1.4103821516036987,
1434
+ "rewards/rejected": -2.9725096225738525,
1435
+ "step": 950
1436
+ },
1437
+ {
1438
+ "epoch": 0.6544803783714688,
1439
+ "grad_norm": 30.007894138540276,
1440
+ "learning_rate": 3.8484848484848483e-07,
1441
+ "logits/chosen": -0.17307066917419434,
1442
+ "logits/rejected": -0.07763750106096268,
1443
+ "logps/chosen": -220.78807067871094,
1444
+ "logps/rejected": -258.3966369628906,
1445
+ "loss": 0.4118,
1446
+ "rewards/accuracies": 0.7984375357627869,
1447
+ "rewards/chosen": -1.657617449760437,
1448
+ "rewards/margins": 1.4771149158477783,
1449
+ "rewards/rejected": -3.134732723236084,
1450
+ "step": 960
1451
+ },
1452
+ {
1453
+ "epoch": 0.6612978823128383,
1454
+ "grad_norm": 29.07536633448252,
1455
+ "learning_rate": 3.7727272727272723e-07,
1456
+ "logits/chosen": -0.15869039297103882,
1457
+ "logits/rejected": -0.07338032126426697,
1458
+ "logps/chosen": -211.30084228515625,
1459
+ "logps/rejected": -255.395751953125,
1460
+ "loss": 0.4357,
1461
+ "rewards/accuracies": 0.8046875,
1462
+ "rewards/chosen": -1.6164686679840088,
1463
+ "rewards/margins": 1.3737252950668335,
1464
+ "rewards/rejected": -2.9901938438415527,
1465
+ "step": 970
1466
+ },
1467
+ {
1468
+ "epoch": 0.6681153862542076,
1469
+ "grad_norm": 28.531124567566387,
1470
+ "learning_rate": 3.696969696969697e-07,
1471
+ "logits/chosen": -0.13030777871608734,
1472
+ "logits/rejected": -0.046453818678855896,
1473
+ "logps/chosen": -216.94631958007812,
1474
+ "logps/rejected": -267.0611572265625,
1475
+ "loss": 0.4098,
1476
+ "rewards/accuracies": 0.8203125596046448,
1477
+ "rewards/chosen": -1.650040626525879,
1478
+ "rewards/margins": 1.7019206285476685,
1479
+ "rewards/rejected": -3.351961135864258,
1480
+ "step": 980
1481
+ },
1482
+ {
1483
+ "epoch": 0.6749328901955771,
1484
+ "grad_norm": 24.268025730195088,
1485
+ "learning_rate": 3.6212121212121213e-07,
1486
+ "logits/chosen": -0.15982282161712646,
1487
+ "logits/rejected": -0.0780140832066536,
1488
+ "logps/chosen": -215.56704711914062,
1489
+ "logps/rejected": -260.79736328125,
1490
+ "loss": 0.397,
1491
+ "rewards/accuracies": 0.828125,
1492
+ "rewards/chosen": -1.6652125120162964,
1493
+ "rewards/margins": 1.7730036973953247,
1494
+ "rewards/rejected": -3.438216209411621,
1495
+ "step": 990
1496
+ },
1497
+ {
1498
+ "epoch": 0.6817503941369466,
1499
+ "grad_norm": 24.14951347534636,
1500
+ "learning_rate": 3.545454545454545e-07,
1501
+ "logits/chosen": -0.16655105352401733,
1502
+ "logits/rejected": -0.06034347787499428,
1503
+ "logps/chosen": -214.05752563476562,
1504
+ "logps/rejected": -254.98703002929688,
1505
+ "loss": 0.4264,
1506
+ "rewards/accuracies": 0.8031250238418579,
1507
+ "rewards/chosen": -1.8080203533172607,
1508
+ "rewards/margins": 1.4334427118301392,
1509
+ "rewards/rejected": -3.2414629459381104,
1510
+ "step": 1000
1511
+ },
1512
+ {
1513
+ "epoch": 0.6885678980783161,
1514
+ "grad_norm": 28.566305760445395,
1515
+ "learning_rate": 3.46969696969697e-07,
1516
+ "logits/chosen": -0.19522453844547272,
1517
+ "logits/rejected": -0.08679309487342834,
1518
+ "logps/chosen": -227.52655029296875,
1519
+ "logps/rejected": -274.0975646972656,
1520
+ "loss": 0.3803,
1521
+ "rewards/accuracies": 0.8328125476837158,
1522
+ "rewards/chosen": -1.7928123474121094,
1523
+ "rewards/margins": 1.5590283870697021,
1524
+ "rewards/rejected": -3.3518409729003906,
1525
+ "step": 1010
1526
+ },
1527
+ {
1528
+ "epoch": 0.6953854020196856,
1529
+ "grad_norm": 47.85219425522013,
1530
+ "learning_rate": 3.393939393939394e-07,
1531
+ "logits/chosen": -0.1567739099264145,
1532
+ "logits/rejected": -0.06111231818795204,
1533
+ "logps/chosen": -228.47779846191406,
1534
+ "logps/rejected": -274.33428955078125,
1535
+ "loss": 0.4016,
1536
+ "rewards/accuracies": 0.8218750357627869,
1537
+ "rewards/chosen": -1.8795576095581055,
1538
+ "rewards/margins": 1.6040751934051514,
1539
+ "rewards/rejected": -3.4836325645446777,
1540
+ "step": 1020
1541
+ },
1542
+ {
1543
+ "epoch": 0.7022029059610551,
1544
+ "grad_norm": 27.480180578165882,
1545
+ "learning_rate": 3.318181818181818e-07,
1546
+ "logits/chosen": -0.14685329794883728,
1547
+ "logits/rejected": -0.04708694666624069,
1548
+ "logps/chosen": -229.77883911132812,
1549
+ "logps/rejected": -269.2246398925781,
1550
+ "loss": 0.434,
1551
+ "rewards/accuracies": 0.8156250715255737,
1552
+ "rewards/chosen": -1.9993985891342163,
1553
+ "rewards/margins": 1.541892647743225,
1554
+ "rewards/rejected": -3.5412912368774414,
1555
+ "step": 1030
1556
+ },
1557
+ {
1558
+ "epoch": 0.7090204099024244,
1559
+ "grad_norm": 28.437076758644576,
1560
+ "learning_rate": 3.242424242424242e-07,
1561
+ "logits/chosen": -0.11219906061887741,
1562
+ "logits/rejected": 0.005472442135214806,
1563
+ "logps/chosen": -223.41433715820312,
1564
+ "logps/rejected": -263.777099609375,
1565
+ "loss": 0.4147,
1566
+ "rewards/accuracies": 0.8203125,
1567
+ "rewards/chosen": -1.9533647298812866,
1568
+ "rewards/margins": 1.5760959386825562,
1569
+ "rewards/rejected": -3.5294606685638428,
1570
+ "step": 1040
1571
+ },
1572
+ {
1573
+ "epoch": 0.7158379138437939,
1574
+ "grad_norm": 24.780730794756213,
1575
+ "learning_rate": 3.166666666666666e-07,
1576
+ "logits/chosen": -0.10113102942705154,
1577
+ "logits/rejected": -0.0247341338545084,
1578
+ "logps/chosen": -224.37001037597656,
1579
+ "logps/rejected": -268.121337890625,
1580
+ "loss": 0.4301,
1581
+ "rewards/accuracies": 0.8125,
1582
+ "rewards/chosen": -1.9565200805664062,
1583
+ "rewards/margins": 1.5767617225646973,
1584
+ "rewards/rejected": -3.5332815647125244,
1585
+ "step": 1050
1586
+ },
1587
+ {
1588
+ "epoch": 0.7226554177851634,
1589
+ "grad_norm": 24.01977768454246,
1590
+ "learning_rate": 3.0909090909090907e-07,
1591
+ "logits/chosen": -0.16017019748687744,
1592
+ "logits/rejected": -0.038362376391887665,
1593
+ "logps/chosen": -221.4423828125,
1594
+ "logps/rejected": -269.96832275390625,
1595
+ "loss": 0.3642,
1596
+ "rewards/accuracies": 0.8578125238418579,
1597
+ "rewards/chosen": -1.7076702117919922,
1598
+ "rewards/margins": 1.7623119354248047,
1599
+ "rewards/rejected": -3.469982147216797,
1600
+ "step": 1060
1601
+ },
1602
+ {
1603
+ "epoch": 0.7294729217265329,
1604
+ "grad_norm": 23.607998459504426,
1605
+ "learning_rate": 3.015151515151515e-07,
1606
+ "logits/chosen": -0.10493813455104828,
1607
+ "logits/rejected": -0.015611783601343632,
1608
+ "logps/chosen": -225.48379516601562,
1609
+ "logps/rejected": -265.3271789550781,
1610
+ "loss": 0.3929,
1611
+ "rewards/accuracies": 0.8359375,
1612
+ "rewards/chosen": -1.8727744817733765,
1613
+ "rewards/margins": 1.5633747577667236,
1614
+ "rewards/rejected": -3.4361491203308105,
1615
+ "step": 1070
1616
+ },
1617
+ {
1618
+ "epoch": 0.7362904256679024,
1619
+ "grad_norm": 27.095218567106365,
1620
+ "learning_rate": 2.939393939393939e-07,
1621
+ "logits/chosen": -0.17973893880844116,
1622
+ "logits/rejected": -0.0618341825902462,
1623
+ "logps/chosen": -216.41917419433594,
1624
+ "logps/rejected": -259.7709655761719,
1625
+ "loss": 0.4123,
1626
+ "rewards/accuracies": 0.8218750357627869,
1627
+ "rewards/chosen": -1.995064377784729,
1628
+ "rewards/margins": 1.4902169704437256,
1629
+ "rewards/rejected": -3.485281467437744,
1630
+ "step": 1080
1631
+ },
1632
+ {
1633
+ "epoch": 0.7431079296092719,
1634
+ "grad_norm": 25.027731308583423,
1635
+ "learning_rate": 2.8636363636363637e-07,
1636
+ "logits/chosen": -0.15540730953216553,
1637
+ "logits/rejected": -0.03777886554598808,
1638
+ "logps/chosen": -228.9147186279297,
1639
+ "logps/rejected": -270.1064758300781,
1640
+ "loss": 0.3986,
1641
+ "rewards/accuracies": 0.831250011920929,
1642
+ "rewards/chosen": -2.030261993408203,
1643
+ "rewards/margins": 1.6086986064910889,
1644
+ "rewards/rejected": -3.638960599899292,
1645
+ "step": 1090
1646
+ },
1647
+ {
1648
+ "epoch": 0.7499254335506412,
1649
+ "grad_norm": 32.01602660729836,
1650
+ "learning_rate": 2.787878787878788e-07,
1651
+ "logits/chosen": -0.11517558991909027,
1652
+ "logits/rejected": -0.03462303429841995,
1653
+ "logps/chosen": -224.9439697265625,
1654
+ "logps/rejected": -259.7923889160156,
1655
+ "loss": 0.4219,
1656
+ "rewards/accuracies": 0.800000011920929,
1657
+ "rewards/chosen": -1.9747884273529053,
1658
+ "rewards/margins": 1.3838858604431152,
1659
+ "rewards/rejected": -3.3586747646331787,
1660
+ "step": 1100
1661
+ },
1662
+ {
1663
+ "epoch": 0.7567429374920107,
1664
+ "grad_norm": 31.517350763988723,
1665
+ "learning_rate": 2.712121212121212e-07,
1666
+ "logits/chosen": -0.12141910940408707,
1667
+ "logits/rejected": -0.01053343154489994,
1668
+ "logps/chosen": -220.55685424804688,
1669
+ "logps/rejected": -259.1381530761719,
1670
+ "loss": 0.4178,
1671
+ "rewards/accuracies": 0.8109375238418579,
1672
+ "rewards/chosen": -1.887320637702942,
1673
+ "rewards/margins": 1.4266173839569092,
1674
+ "rewards/rejected": -3.3139376640319824,
1675
+ "step": 1110
1676
+ },
1677
+ {
1678
+ "epoch": 0.7635604414333802,
1679
+ "grad_norm": 26.127671898389973,
1680
+ "learning_rate": 2.636363636363636e-07,
1681
+ "logits/chosen": -0.08841400593519211,
1682
+ "logits/rejected": -0.0024342993274331093,
1683
+ "logps/chosen": -226.21588134765625,
1684
+ "logps/rejected": -265.4579772949219,
1685
+ "loss": 0.4114,
1686
+ "rewards/accuracies": 0.8328125476837158,
1687
+ "rewards/chosen": -1.9491462707519531,
1688
+ "rewards/margins": 1.4079630374908447,
1689
+ "rewards/rejected": -3.357109308242798,
1690
+ "step": 1120
1691
+ },
1692
+ {
1693
+ "epoch": 0.7703779453747497,
1694
+ "grad_norm": 27.236714171841353,
1695
+ "learning_rate": 2.56060606060606e-07,
1696
+ "logits/chosen": -0.09334474056959152,
1697
+ "logits/rejected": 0.006139551289379597,
1698
+ "logps/chosen": -221.79632568359375,
1699
+ "logps/rejected": -259.6671447753906,
1700
+ "loss": 0.4146,
1701
+ "rewards/accuracies": 0.8125000596046448,
1702
+ "rewards/chosen": -1.8307971954345703,
1703
+ "rewards/margins": 1.6026943922042847,
1704
+ "rewards/rejected": -3.4334912300109863,
1705
+ "step": 1130
1706
+ },
1707
+ {
1708
+ "epoch": 0.7771954493161192,
1709
+ "grad_norm": 23.66366648859467,
1710
+ "learning_rate": 2.4848484848484846e-07,
1711
+ "logits/chosen": -0.11060778051614761,
1712
+ "logits/rejected": -0.020527532324194908,
1713
+ "logps/chosen": -217.34706115722656,
1714
+ "logps/rejected": -258.9071350097656,
1715
+ "loss": 0.4164,
1716
+ "rewards/accuracies": 0.8093750476837158,
1717
+ "rewards/chosen": -1.7729765176773071,
1718
+ "rewards/margins": 1.513333797454834,
1719
+ "rewards/rejected": -3.2863101959228516,
1720
+ "step": 1140
1721
+ },
1722
+ {
1723
+ "epoch": 0.7840129532574887,
1724
+ "grad_norm": 25.2882907135726,
1725
+ "learning_rate": 2.409090909090909e-07,
1726
+ "logits/chosen": -0.10374785959720612,
1727
+ "logits/rejected": -0.018276991322636604,
1728
+ "logps/chosen": -236.36245727539062,
1729
+ "logps/rejected": -274.1772766113281,
1730
+ "loss": 0.4001,
1731
+ "rewards/accuracies": 0.828125,
1732
+ "rewards/chosen": -1.8536320924758911,
1733
+ "rewards/margins": 1.4811946153640747,
1734
+ "rewards/rejected": -3.3348264694213867,
1735
+ "step": 1150
1736
+ },
1737
+ {
1738
+ "epoch": 0.790830457198858,
1739
+ "grad_norm": 26.790523464833836,
1740
+ "learning_rate": 2.3333333333333333e-07,
1741
+ "logits/chosen": -0.13400709629058838,
1742
+ "logits/rejected": -0.03729373216629028,
1743
+ "logps/chosen": -220.23544311523438,
1744
+ "logps/rejected": -264.3974304199219,
1745
+ "loss": 0.3736,
1746
+ "rewards/accuracies": 0.8374999761581421,
1747
+ "rewards/chosen": -1.7529268264770508,
1748
+ "rewards/margins": 1.6899100542068481,
1749
+ "rewards/rejected": -3.4428367614746094,
1750
+ "step": 1160
1751
+ },
1752
+ {
1753
+ "epoch": 0.7976479611402275,
1754
+ "grad_norm": 19.702274370294905,
1755
+ "learning_rate": 2.2575757575757576e-07,
1756
+ "logits/chosen": -0.17225009202957153,
1757
+ "logits/rejected": -0.06531926244497299,
1758
+ "logps/chosen": -217.31951904296875,
1759
+ "logps/rejected": -262.68475341796875,
1760
+ "loss": 0.3893,
1761
+ "rewards/accuracies": 0.8296875357627869,
1762
+ "rewards/chosen": -1.773924469947815,
1763
+ "rewards/margins": 1.5378942489624023,
1764
+ "rewards/rejected": -3.311818838119507,
1765
+ "step": 1170
1766
+ },
1767
+ {
1768
+ "epoch": 0.804465465081597,
1769
+ "grad_norm": 23.425791089677848,
1770
+ "learning_rate": 2.1818181818181815e-07,
1771
+ "logits/chosen": -0.1890868991613388,
1772
+ "logits/rejected": -0.09723814576864243,
1773
+ "logps/chosen": -237.30435180664062,
1774
+ "logps/rejected": -280.53155517578125,
1775
+ "loss": 0.4049,
1776
+ "rewards/accuracies": 0.8046875,
1777
+ "rewards/chosen": -1.9801266193389893,
1778
+ "rewards/margins": 1.6612507104873657,
1779
+ "rewards/rejected": -3.6413774490356445,
1780
+ "step": 1180
1781
+ },
1782
+ {
1783
+ "epoch": 0.8112829690229665,
1784
+ "grad_norm": 24.870512384975044,
1785
+ "learning_rate": 2.106060606060606e-07,
1786
+ "logits/chosen": -0.15967592597007751,
1787
+ "logits/rejected": -0.04680642858147621,
1788
+ "logps/chosen": -226.22872924804688,
1789
+ "logps/rejected": -259.76031494140625,
1790
+ "loss": 0.4036,
1791
+ "rewards/accuracies": 0.8218750357627869,
1792
+ "rewards/chosen": -1.8575615882873535,
1793
+ "rewards/margins": 1.4922484159469604,
1794
+ "rewards/rejected": -3.3498101234436035,
1795
+ "step": 1190
1796
+ },
1797
+ {
1798
+ "epoch": 0.818100472964336,
1799
+ "grad_norm": 24.491520644166894,
1800
+ "learning_rate": 2.0303030303030303e-07,
1801
+ "logits/chosen": -0.1476404070854187,
1802
+ "logits/rejected": -0.056609444320201874,
1803
+ "logps/chosen": -218.4747314453125,
1804
+ "logps/rejected": -266.4804382324219,
1805
+ "loss": 0.3761,
1806
+ "rewards/accuracies": 0.839062511920929,
1807
+ "rewards/chosen": -1.8029754161834717,
1808
+ "rewards/margins": 1.7781448364257812,
1809
+ "rewards/rejected": -3.581120014190674,
1810
+ "step": 1200
1811
+ },
1812
+ {
1813
+ "epoch": 0.8249179769057055,
1814
+ "grad_norm": 25.43453775835723,
1815
+ "learning_rate": 1.9545454545454545e-07,
1816
+ "logits/chosen": -0.19677500426769257,
1817
+ "logits/rejected": -0.1171552985906601,
1818
+ "logps/chosen": -229.79348754882812,
1819
+ "logps/rejected": -272.9264831542969,
1820
+ "loss": 0.3763,
1821
+ "rewards/accuracies": 0.8343750238418579,
1822
+ "rewards/chosen": -1.9184120893478394,
1823
+ "rewards/margins": 1.567906379699707,
1824
+ "rewards/rejected": -3.486318349838257,
1825
+ "step": 1210
1826
+ },
1827
+ {
1828
+ "epoch": 0.8317354808470748,
1829
+ "grad_norm": 28.16577093909573,
1830
+ "learning_rate": 1.8787878787878785e-07,
1831
+ "logits/chosen": -0.2017778754234314,
1832
+ "logits/rejected": -0.09277643263339996,
1833
+ "logps/chosen": -231.24696350097656,
1834
+ "logps/rejected": -278.23516845703125,
1835
+ "loss": 0.3691,
1836
+ "rewards/accuracies": 0.854687511920929,
1837
+ "rewards/chosen": -1.9549689292907715,
1838
+ "rewards/margins": 1.7604336738586426,
1839
+ "rewards/rejected": -3.715402603149414,
1840
+ "step": 1220
1841
+ },
1842
+ {
1843
+ "epoch": 0.8385529847884443,
1844
+ "grad_norm": 21.84200034001996,
1845
+ "learning_rate": 1.803030303030303e-07,
1846
+ "logits/chosen": -0.21481652557849884,
1847
+ "logits/rejected": -0.09980207681655884,
1848
+ "logps/chosen": -233.2796630859375,
1849
+ "logps/rejected": -282.91644287109375,
1850
+ "loss": 0.3711,
1851
+ "rewards/accuracies": 0.8609374761581421,
1852
+ "rewards/chosen": -2.0337018966674805,
1853
+ "rewards/margins": 1.896054983139038,
1854
+ "rewards/rejected": -3.9297573566436768,
1855
+ "step": 1230
1856
+ },
1857
+ {
1858
+ "epoch": 0.8453704887298138,
1859
+ "grad_norm": 22.694435946938032,
1860
+ "learning_rate": 1.7272727272727272e-07,
1861
+ "logits/chosen": -0.15690943598747253,
1862
+ "logits/rejected": -0.06275378912687302,
1863
+ "logps/chosen": -227.27455139160156,
1864
+ "logps/rejected": -274.53790283203125,
1865
+ "loss": 0.3913,
1866
+ "rewards/accuracies": 0.846875011920929,
1867
+ "rewards/chosen": -1.9616923332214355,
1868
+ "rewards/margins": 1.6192635297775269,
1869
+ "rewards/rejected": -3.580955982208252,
1870
+ "step": 1240
1871
+ },
1872
+ {
1873
+ "epoch": 0.8521879926711833,
1874
+ "grad_norm": 26.094403500936398,
1875
+ "learning_rate": 1.6515151515151515e-07,
1876
+ "logits/chosen": -0.19916404783725739,
1877
+ "logits/rejected": -0.08219482004642487,
1878
+ "logps/chosen": -227.81277465820312,
1879
+ "logps/rejected": -274.002685546875,
1880
+ "loss": 0.3713,
1881
+ "rewards/accuracies": 0.84375,
1882
+ "rewards/chosen": -2.1085541248321533,
1883
+ "rewards/margins": 1.6733149290084839,
1884
+ "rewards/rejected": -3.7818689346313477,
1885
+ "step": 1250
1886
+ },
1887
+ {
1888
+ "epoch": 0.8590054966125528,
1889
+ "grad_norm": 28.285289787916426,
1890
+ "learning_rate": 1.5757575757575757e-07,
1891
+ "logits/chosen": -0.20249146223068237,
1892
+ "logits/rejected": -0.11430975049734116,
1893
+ "logps/chosen": -224.1591796875,
1894
+ "logps/rejected": -267.00555419921875,
1895
+ "loss": 0.355,
1896
+ "rewards/accuracies": 0.8531250357627869,
1897
+ "rewards/chosen": -2.015260934829712,
1898
+ "rewards/margins": 1.7478997707366943,
1899
+ "rewards/rejected": -3.7631607055664062,
1900
+ "step": 1260
1901
+ },
1902
+ {
1903
+ "epoch": 0.8658230005539223,
1904
+ "grad_norm": 28.62125408987568,
1905
+ "learning_rate": 1.5e-07,
1906
+ "logits/chosen": -0.23050257563591003,
1907
+ "logits/rejected": -0.13684435188770294,
1908
+ "logps/chosen": -226.26783752441406,
1909
+ "logps/rejected": -269.7716979980469,
1910
+ "loss": 0.3747,
1911
+ "rewards/accuracies": 0.840624988079071,
1912
+ "rewards/chosen": -1.9981603622436523,
1913
+ "rewards/margins": 1.6271231174468994,
1914
+ "rewards/rejected": -3.6252834796905518,
1915
+ "step": 1270
1916
+ },
1917
+ {
1918
+ "epoch": 0.8726405044952916,
1919
+ "grad_norm": 28.239549647986106,
1920
+ "learning_rate": 1.4242424242424242e-07,
1921
+ "logits/chosen": -0.21466362476348877,
1922
+ "logits/rejected": -0.11737212538719177,
1923
+ "logps/chosen": -225.7066650390625,
1924
+ "logps/rejected": -269.4527893066406,
1925
+ "loss": 0.3437,
1926
+ "rewards/accuracies": 0.862500011920929,
1927
+ "rewards/chosen": -2.0757107734680176,
1928
+ "rewards/margins": 1.816016674041748,
1929
+ "rewards/rejected": -3.8917269706726074,
1930
+ "step": 1280
1931
+ },
1932
+ {
1933
+ "epoch": 0.8794580084366611,
1934
+ "grad_norm": 31.488998628971228,
1935
+ "learning_rate": 1.3484848484848484e-07,
1936
+ "logits/chosen": -0.2509796619415283,
1937
+ "logits/rejected": -0.15716706216335297,
1938
+ "logps/chosen": -227.94644165039062,
1939
+ "logps/rejected": -273.2456970214844,
1940
+ "loss": 0.3713,
1941
+ "rewards/accuracies": 0.8312500715255737,
1942
+ "rewards/chosen": -2.063997268676758,
1943
+ "rewards/margins": 1.7757971286773682,
1944
+ "rewards/rejected": -3.839794635772705,
1945
+ "step": 1290
1946
+ },
1947
+ {
1948
+ "epoch": 0.8862755123780306,
1949
+ "grad_norm": 30.3120062725158,
1950
+ "learning_rate": 1.2727272727272726e-07,
1951
+ "logits/chosen": -0.22213181853294373,
1952
+ "logits/rejected": -0.10703583061695099,
1953
+ "logps/chosen": -230.4038848876953,
1954
+ "logps/rejected": -277.89312744140625,
1955
+ "loss": 0.3841,
1956
+ "rewards/accuracies": 0.8343750238418579,
1957
+ "rewards/chosen": -2.1271657943725586,
1958
+ "rewards/margins": 1.8834253549575806,
1959
+ "rewards/rejected": -4.01059103012085,
1960
+ "step": 1300
1961
+ },
1962
+ {
1963
+ "epoch": 0.8930930163194001,
1964
+ "grad_norm": 20.44931799862506,
1965
+ "learning_rate": 1.196969696969697e-07,
1966
+ "logits/chosen": -0.19441619515419006,
1967
+ "logits/rejected": -0.09727019816637039,
1968
+ "logps/chosen": -221.7393035888672,
1969
+ "logps/rejected": -264.74078369140625,
1970
+ "loss": 0.3754,
1971
+ "rewards/accuracies": 0.831250011920929,
1972
+ "rewards/chosen": -2.1274771690368652,
1973
+ "rewards/margins": 1.6642124652862549,
1974
+ "rewards/rejected": -3.791689395904541,
1975
+ "step": 1310
1976
+ },
1977
+ {
1978
+ "epoch": 0.8999105202607696,
1979
+ "grad_norm": 29.43599693108452,
1980
+ "learning_rate": 1.1212121212121211e-07,
1981
+ "logits/chosen": -0.20746608078479767,
1982
+ "logits/rejected": -0.10938036441802979,
1983
+ "logps/chosen": -230.49411010742188,
1984
+ "logps/rejected": -274.149169921875,
1985
+ "loss": 0.4089,
1986
+ "rewards/accuracies": 0.8140624761581421,
1987
+ "rewards/chosen": -2.111417293548584,
1988
+ "rewards/margins": 1.6498727798461914,
1989
+ "rewards/rejected": -3.7612900733947754,
1990
+ "step": 1320
1991
+ },
1992
+ {
1993
+ "epoch": 0.9067280242021389,
1994
+ "grad_norm": 32.13529497456355,
1995
+ "learning_rate": 1.0454545454545454e-07,
1996
+ "logits/chosen": -0.19128209352493286,
1997
+ "logits/rejected": -0.09950501471757889,
1998
+ "logps/chosen": -232.70655822753906,
1999
+ "logps/rejected": -277.17218017578125,
2000
+ "loss": 0.4075,
2001
+ "rewards/accuracies": 0.8171875476837158,
2002
+ "rewards/chosen": -2.2094011306762695,
2003
+ "rewards/margins": 1.6824061870574951,
2004
+ "rewards/rejected": -3.8918075561523438,
2005
+ "step": 1330
2006
+ },
2007
+ {
2008
+ "epoch": 0.9135455281435084,
2009
+ "grad_norm": 24.495028721716935,
2010
+ "learning_rate": 9.696969696969696e-08,
2011
+ "logits/chosen": -0.2125847339630127,
2012
+ "logits/rejected": -0.13061244785785675,
2013
+ "logps/chosen": -238.3480682373047,
2014
+ "logps/rejected": -278.2753601074219,
2015
+ "loss": 0.3927,
2016
+ "rewards/accuracies": 0.828125,
2017
+ "rewards/chosen": -1.9954111576080322,
2018
+ "rewards/margins": 1.6787245273590088,
2019
+ "rewards/rejected": -3.674135208129883,
2020
+ "step": 1340
2021
+ },
2022
+ {
2023
+ "epoch": 0.9203630320848779,
2024
+ "grad_norm": 18.494054905792385,
2025
+ "learning_rate": 8.93939393939394e-08,
2026
+ "logits/chosen": -0.22991694509983063,
2027
+ "logits/rejected": -0.1375647336244583,
2028
+ "logps/chosen": -232.1606903076172,
2029
+ "logps/rejected": -277.8919677734375,
2030
+ "loss": 0.3625,
2031
+ "rewards/accuracies": 0.846875011920929,
2032
+ "rewards/chosen": -1.9369205236434937,
2033
+ "rewards/margins": 1.840739369392395,
2034
+ "rewards/rejected": -3.7776598930358887,
2035
+ "step": 1350
2036
+ },
2037
+ {
2038
+ "epoch": 0.9271805360262474,
2039
+ "grad_norm": 23.0952598889843,
2040
+ "learning_rate": 8.181818181818182e-08,
2041
+ "logits/chosen": -0.2471987009048462,
2042
+ "logits/rejected": -0.15926134586334229,
2043
+ "logps/chosen": -226.1959991455078,
2044
+ "logps/rejected": -268.29144287109375,
2045
+ "loss": 0.3859,
2046
+ "rewards/accuracies": 0.828125,
2047
+ "rewards/chosen": -1.9533106088638306,
2048
+ "rewards/margins": 1.6697773933410645,
2049
+ "rewards/rejected": -3.6230881214141846,
2050
+ "step": 1360
2051
+ },
2052
+ {
2053
+ "epoch": 0.9339980399676169,
2054
+ "grad_norm": 31.57121472361453,
2055
+ "learning_rate": 7.424242424242424e-08,
2056
+ "logits/chosen": -0.21243150532245636,
2057
+ "logits/rejected": -0.10772553086280823,
2058
+ "logps/chosen": -229.5428924560547,
2059
+ "logps/rejected": -267.2238464355469,
2060
+ "loss": 0.41,
2061
+ "rewards/accuracies": 0.8093750476837158,
2062
+ "rewards/chosen": -2.041501998901367,
2063
+ "rewards/margins": 1.6097761392593384,
2064
+ "rewards/rejected": -3.651278257369995,
2065
+ "step": 1370
2066
+ },
2067
+ {
2068
+ "epoch": 0.9408155439089864,
2069
+ "grad_norm": 24.660859941372323,
2070
+ "learning_rate": 6.666666666666667e-08,
2071
+ "logits/chosen": -0.2305571436882019,
2072
+ "logits/rejected": -0.13984078168869019,
2073
+ "logps/chosen": -235.77882385253906,
2074
+ "logps/rejected": -275.8688659667969,
2075
+ "loss": 0.3632,
2076
+ "rewards/accuracies": 0.8515625,
2077
+ "rewards/chosen": -1.9864195585250854,
2078
+ "rewards/margins": 1.6625468730926514,
2079
+ "rewards/rejected": -3.6489667892456055,
2080
+ "step": 1380
2081
+ },
2082
+ {
2083
+ "epoch": 0.9476330478503557,
2084
+ "grad_norm": 21.4536097610763,
2085
+ "learning_rate": 5.9090909090909085e-08,
2086
+ "logits/chosen": -0.2120400369167328,
2087
+ "logits/rejected": -0.12410594522953033,
2088
+ "logps/chosen": -222.5196075439453,
2089
+ "logps/rejected": -271.9889831542969,
2090
+ "loss": 0.3386,
2091
+ "rewards/accuracies": 0.8750000596046448,
2092
+ "rewards/chosen": -1.9523049592971802,
2093
+ "rewards/margins": 1.976583480834961,
2094
+ "rewards/rejected": -3.9288883209228516,
2095
+ "step": 1390
2096
+ },
2097
+ {
2098
+ "epoch": 0.9544505517917252,
2099
+ "grad_norm": 24.15173339303778,
2100
+ "learning_rate": 5.151515151515151e-08,
2101
+ "logits/chosen": -0.2382027804851532,
2102
+ "logits/rejected": -0.15090808272361755,
2103
+ "logps/chosen": -222.01071166992188,
2104
+ "logps/rejected": -271.6986389160156,
2105
+ "loss": 0.3614,
2106
+ "rewards/accuracies": 0.8453124761581421,
2107
+ "rewards/chosen": -2.006227731704712,
2108
+ "rewards/margins": 1.7343711853027344,
2109
+ "rewards/rejected": -3.740598678588867,
2110
+ "step": 1400
2111
+ },
2112
+ {
2113
+ "epoch": 0.9612680557330947,
2114
+ "grad_norm": 24.550521505283424,
2115
+ "learning_rate": 4.393939393939393e-08,
2116
+ "logits/chosen": -0.17552296817302704,
2117
+ "logits/rejected": -0.08083190023899078,
2118
+ "logps/chosen": -228.05532836914062,
2119
+ "logps/rejected": -272.7567443847656,
2120
+ "loss": 0.4114,
2121
+ "rewards/accuracies": 0.8218749761581421,
2122
+ "rewards/chosen": -2.1707372665405273,
2123
+ "rewards/margins": 1.5515494346618652,
2124
+ "rewards/rejected": -3.7222867012023926,
2125
+ "step": 1410
2126
+ },
2127
+ {
2128
+ "epoch": 0.9680855596744642,
2129
+ "grad_norm": 22.32856821239117,
2130
+ "learning_rate": 3.636363636363636e-08,
2131
+ "logits/chosen": -0.22575151920318604,
2132
+ "logits/rejected": -0.12315725535154343,
2133
+ "logps/chosen": -227.44094848632812,
2134
+ "logps/rejected": -275.5225830078125,
2135
+ "loss": 0.3557,
2136
+ "rewards/accuracies": 0.846875011920929,
2137
+ "rewards/chosen": -1.9577521085739136,
2138
+ "rewards/margins": 1.8534328937530518,
2139
+ "rewards/rejected": -3.811184883117676,
2140
+ "step": 1420
2141
+ },
2142
+ {
2143
+ "epoch": 0.9749030636158337,
2144
+ "grad_norm": 29.28483492412063,
2145
+ "learning_rate": 2.8787878787878787e-08,
2146
+ "logits/chosen": -0.20699195563793182,
2147
+ "logits/rejected": -0.11024124175310135,
2148
+ "logps/chosen": -226.45755004882812,
2149
+ "logps/rejected": -270.3711242675781,
2150
+ "loss": 0.3872,
2151
+ "rewards/accuracies": 0.8343750238418579,
2152
+ "rewards/chosen": -2.00704288482666,
2153
+ "rewards/margins": 1.5971609354019165,
2154
+ "rewards/rejected": -3.604203939437866,
2155
+ "step": 1430
2156
+ },
2157
+ {
2158
+ "epoch": 0.9817205675572032,
2159
+ "grad_norm": 25.452990040575536,
2160
+ "learning_rate": 2.1212121212121214e-08,
2161
+ "logits/chosen": -0.2176055610179901,
2162
+ "logits/rejected": -0.09939160197973251,
2163
+ "logps/chosen": -219.8345947265625,
2164
+ "logps/rejected": -268.5072937011719,
2165
+ "loss": 0.3557,
2166
+ "rewards/accuracies": 0.859375,
2167
+ "rewards/chosen": -2.036515235900879,
2168
+ "rewards/margins": 1.7241967916488647,
2169
+ "rewards/rejected": -3.760712146759033,
2170
+ "step": 1440
2171
+ },
2172
+ {
2173
+ "epoch": 0.9885380714985725,
2174
+ "grad_norm": 21.433383519856523,
2175
+ "learning_rate": 1.3636363636363635e-08,
2176
+ "logits/chosen": -0.19624559581279755,
2177
+ "logits/rejected": -0.10556697845458984,
2178
+ "logps/chosen": -229.1326141357422,
2179
+ "logps/rejected": -277.2064514160156,
2180
+ "loss": 0.3724,
2181
+ "rewards/accuracies": 0.8500000238418579,
2182
+ "rewards/chosen": -2.0669782161712646,
2183
+ "rewards/margins": 1.8541796207427979,
2184
+ "rewards/rejected": -3.9211580753326416,
2185
+ "step": 1450
2186
+ },
2187
+ {
2188
+ "epoch": 0.995355575439942,
2189
+ "grad_norm": 24.137434407958857,
2190
+ "learning_rate": 6.06060606060606e-09,
2191
+ "logits/chosen": -0.19412463903427124,
2192
+ "logits/rejected": -0.08993732929229736,
2193
+ "logps/chosen": -227.69815063476562,
2194
+ "logps/rejected": -274.43865966796875,
2195
+ "loss": 0.3698,
2196
+ "rewards/accuracies": 0.848437488079071,
2197
+ "rewards/chosen": -1.9875869750976562,
2198
+ "rewards/margins": 1.7459800243377686,
2199
+ "rewards/rejected": -3.7335667610168457,
2200
+ "step": 1460
2201
+ },
2202
+ {
2203
+ "epoch": 1.0,
2204
+ "step": 1467,
2205
+ "total_flos": 161507922542592.0,
2206
+ "train_loss": 0.48762158089620206,
2207
+ "train_runtime": 14310.7821,
2208
+ "train_samples_per_second": 6.56,
2209
+ "train_steps_per_second": 0.103
2210
+ }
2211
+ ],
2212
+ "logging_steps": 10,
2213
+ "max_steps": 1467,
2214
+ "num_input_tokens_seen": 0,
2215
+ "num_train_epochs": 1,
2216
+ "save_steps": 500,
2217
+ "stateful_callbacks": {
2218
+ "TrainerControl": {
2219
+ "args": {
2220
+ "should_epoch_stop": false,
2221
+ "should_evaluate": false,
2222
+ "should_log": false,
2223
+ "should_save": true,
2224
+ "should_training_stop": true
2225
+ },
2226
+ "attributes": {}
2227
+ }
2228
+ },
2229
+ "total_flos": 161507922542592.0,
2230
+ "train_batch_size": 1,
2231
+ "trial_name": null,
2232
+ "trial_params": null
2233
+ }
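For reference, the per-step entries above are internally consistent: `rewards/margins` equals `rewards/chosen` minus `rewards/rejected` (e.g. at step 740, -1.6323 - (-2.8517) ≈ 1.2194, matching the logged margin). Below is a minimal sketch (not part of this commit) for reading these logs back, assuming the standard `trainer_state.json` layout written by the `transformers` Trainer, where `log_history` holds one dict per logging step with the keys shown in this diff:

```python
import json

# Load the trainer state; "log_history" holds one dict per logged step,
# plus a final summary entry (train_loss, train_runtime, ...).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step DPO logs; the summary entry lacks these keys.
history = [e for e in state["log_history"] if "rewards/margins" in e]

final = history[-1]
print(f"steps logged: {len(history)}")
print(f"final loss: {final['loss']:.4f}")
print(f"final reward margin: {final['rewards/margins']:.3f}")
print(f"final preference accuracy: {final['rewards/accuracies']:.3f}")
```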
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72b55421ab9cfb084e8fe8ac9e40252762b0dbc8d67713d0c2366a9d3d44378b
+ size 7544
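The three lines above are a Git LFS pointer (spec version, object hash, byte size), not the binary itself, so the real `training_args.bin` must be fetched first, e.g. with `git lfs pull`. A hedged sketch for inspecting it, assuming the usual `transformers` convention that this file is a pickled `TrainingArguments` object saved with `torch.save`:

```python
import torch

# weights_only=False is needed on recent torch versions, since this is a
# pickled Python object rather than a plain tensor checkpoint.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```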
training_loss.png ADDED
training_rewards_accuracies.png ADDED
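Both images plot the log history above. A minimal sketch of how equivalent figures could be regenerated, assuming `matplotlib` is installed and `trainer_state.json` sits in the working directory:

```python
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    # Per-step entries only; the final summary entry has no "loss" key.
    logs = [e for e in json.load(f)["log_history"] if "loss" in e]

steps = [e["step"] for e in logs]

# Training loss over steps (counterpart of training_loss.png).
plt.figure()
plt.plot(steps, [e["loss"] for e in logs])
plt.xlabel("step")
plt.ylabel("loss")
plt.savefig("training_loss.png")

# Preference accuracy over steps (counterpart of training_rewards_accuracies.png).
plt.figure()
plt.plot(steps, [e["rewards/accuracies"] for e in logs])
plt.xlabel("step")
plt.ylabel("rewards/accuracies")
plt.savefig("training_rewards_accuracies.png")
```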