davidanugraha committed
Commit d18012b · verified · 1 Parent(s): 33001fd

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: meta-llama/Llama-3.2-3B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: helpsteer3_llama32_3b_dpo_rmr1
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # helpsteer3_llama32_3b_dpo_rmr1
+
+ This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_rmr1 dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-07
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: ADAMW_TORCH with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.6.0
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
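
For reference, a minimal loading sketch with `transformers`. The repo id below is an assumption inferred from the committer and model name; it is not stated anywhere in this commit.

```python
# Minimal loading sketch -- repo id is assumed, not confirmed by this commit.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "davidanugraha/helpsteer3_llama32_3b_dpo_rmr1"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
```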
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 1.0,
+     "total_flos": 160669524688896.0,
+     "train_loss": 0.5754778021889021,
+     "train_runtime": 14476.1212,
+     "train_samples_per_second": 6.439,
+     "train_steps_per_second": 0.101
+ }
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+     {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+     {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+     {%- if strftime_now is defined %}
+         {%- set date_string = strftime_now("%d %b %Y") %}
+     {%- else %}
+         {%- set date_string = "26 Jul 2024" %}
+     {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+     {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+     {%- set system_message = messages[0]['content']|trim %}
+     {%- set messages = messages[1:] %}
+ {%- else %}
+     {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+     {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+     {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+     {#- Extract the first user message so we can plug it in here #}
+     {%- if messages | length != 0 %}
+         {%- set first_user_message = messages[0]['content']|trim %}
+         {%- set messages = messages[1:] %}
+     {%- else %}
+         {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+     {%- endif %}
+     {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+     {{- "Given the following functions, please respond with a JSON for a function call " }}
+     {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+     {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+     {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+         {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+     {%- elif 'tool_calls' in message %}
+         {%- if not message.tool_calls|length == 1 %}
+             {{- raise_exception("This model only supports single tool-calls at once!") }}
+         {%- endif %}
+         {%- set tool_call = message.tool_calls[0].function %}
+         {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+         {{- '{"name": "' + tool_call.name + '", ' }}
+         {{- '"parameters": ' }}
+         {{- tool_call.arguments | tojson }}
+         {{- "}" }}
+         {{- "<|eot_id|>" }}
+     {%- elif message.role == "tool" or message.role == "ipython" %}
+         {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+         {%- if message.content is mapping or message.content is iterable %}
+             {{- message.content | tojson }}
+         {%- else %}
+             {{- message.content }}
+         {%- endif %}
+         {{- "<|eot_id|>" }}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
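
The template above is the stock Llama 3 chat format: header tokens around each role, optional tool-calling blocks, and a trailing assistant header when `add_generation_prompt` is set. A short sketch of rendering it through a loaded tokenizer, assuming the `tokenizer` from the earlier snippet:

```python
# Sketch: render the chat template without tokenizing, to inspect the prompt.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize DPO in one sentence."},
]
prompt = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, tokenize=False
)
print(prompt)  # begins with <|begin_of_text|><|start_header_id|>system<|end_header_id|>
```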
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.52.4",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.52.4"
+ }
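
The sampling defaults above (do_sample with temperature 0.6 and top_p 0.9) are what `generate` picks up automatically from this file; a sketch of passing them explicitly, assuming the `model`, `tokenizer`, and `messages` from the earlier snippets:

```python
# Sketch: generate with the defaults recorded in generation_config.json.
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)
output_ids = model.generate(
    input_ids, max_new_tokens=128, do_sample=True, temperature=0.6, top_p=0.9
)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```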
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a36bd4028c43bf626b6bad8d060d2812d234c7f075e71145c654531d5f89cd13
+ size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b77dddb5138e49224767e7fd15ae7988347d45f79e42d0b0704ac7757d68b991
+ size 1459745184
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+   "metadata": {
+     "total_size": 6425499648
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+   }
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|eot_id|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|eot_id|>"
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
+ {
+   "added_tokens_decoder": {
+     "128000": {
+       "content": "<|begin_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128001": {
+       "content": "<|end_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128002": {
+       "content": "<|reserved_special_token_0|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128003": {
+       "content": "<|reserved_special_token_1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128004": {
+       "content": "<|finetune_right_pad_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128005": {
+       "content": "<|reserved_special_token_2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128006": {
+       "content": "<|start_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128007": {
+       "content": "<|end_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128008": {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128009": {
+       "content": "<|eot_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128010": {
+       "content": "<|python_tag|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128011": {
+       "content": "<|reserved_special_token_3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128012": {
+       "content": "<|reserved_special_token_4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128013": {
+       "content": "<|reserved_special_token_5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128014": {
+       "content": "<|reserved_special_token_6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128015": {
+       "content": "<|reserved_special_token_7|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128016": {
+       "content": "<|reserved_special_token_8|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128017": {
+       "content": "<|reserved_special_token_9|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128018": {
+       "content": "<|reserved_special_token_10|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128019": {
+       "content": "<|reserved_special_token_11|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128020": {
+       "content": "<|reserved_special_token_12|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128021": {
+       "content": "<|reserved_special_token_13|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128022": {
+       "content": "<|reserved_special_token_14|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128023": {
+       "content": "<|reserved_special_token_15|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128024": {
+       "content": "<|reserved_special_token_16|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128025": {
+       "content": "<|reserved_special_token_17|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128026": {
+       "content": "<|reserved_special_token_18|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128027": {
+       "content": "<|reserved_special_token_19|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128028": {
+       "content": "<|reserved_special_token_20|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128029": {
+       "content": "<|reserved_special_token_21|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128030": {
+       "content": "<|reserved_special_token_22|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128031": {
+       "content": "<|reserved_special_token_23|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128032": {
+       "content": "<|reserved_special_token_24|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128033": {
+       "content": "<|reserved_special_token_25|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128034": {
+       "content": "<|reserved_special_token_26|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128035": {
+       "content": "<|reserved_special_token_27|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128036": {
+       "content": "<|reserved_special_token_28|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128037": {
+       "content": "<|reserved_special_token_29|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128038": {
+       "content": "<|reserved_special_token_30|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128039": {
+       "content": "<|reserved_special_token_31|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128040": {
+       "content": "<|reserved_special_token_32|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128041": {
+       "content": "<|reserved_special_token_33|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128042": {
+       "content": "<|reserved_special_token_34|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128043": {
+       "content": "<|reserved_special_token_35|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128044": {
+       "content": "<|reserved_special_token_36|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128045": {
+       "content": "<|reserved_special_token_37|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128046": {
+       "content": "<|reserved_special_token_38|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128047": {
+       "content": "<|reserved_special_token_39|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128048": {
+       "content": "<|reserved_special_token_40|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128049": {
+       "content": "<|reserved_special_token_41|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128050": {
+       "content": "<|reserved_special_token_42|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128051": {
+       "content": "<|reserved_special_token_43|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128052": {
+       "content": "<|reserved_special_token_44|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128053": {
+       "content": "<|reserved_special_token_45|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128054": {
+       "content": "<|reserved_special_token_46|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128055": {
+       "content": "<|reserved_special_token_47|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128056": {
+       "content": "<|reserved_special_token_48|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128057": {
+       "content": "<|reserved_special_token_49|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128058": {
+       "content": "<|reserved_special_token_50|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128059": {
+       "content": "<|reserved_special_token_51|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128060": {
+       "content": "<|reserved_special_token_52|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128061": {
+       "content": "<|reserved_special_token_53|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128062": {
+       "content": "<|reserved_special_token_54|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128063": {
+       "content": "<|reserved_special_token_55|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128064": {
+       "content": "<|reserved_special_token_56|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128065": {
+       "content": "<|reserved_special_token_57|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128066": {
+       "content": "<|reserved_special_token_58|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128067": {
+       "content": "<|reserved_special_token_59|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128068": {
+       "content": "<|reserved_special_token_60|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128069": {
+       "content": "<|reserved_special_token_61|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128070": {
+       "content": "<|reserved_special_token_62|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128071": {
+       "content": "<|reserved_special_token_63|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128072": {
+       "content": "<|reserved_special_token_64|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128073": {
+       "content": "<|reserved_special_token_65|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128074": {
+       "content": "<|reserved_special_token_66|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128075": {
+       "content": "<|reserved_special_token_67|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128076": {
+       "content": "<|reserved_special_token_68|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128077": {
+       "content": "<|reserved_special_token_69|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128078": {
+       "content": "<|reserved_special_token_70|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128079": {
+       "content": "<|reserved_special_token_71|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128080": {
+       "content": "<|reserved_special_token_72|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128081": {
+       "content": "<|reserved_special_token_73|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128082": {
+       "content": "<|reserved_special_token_74|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128083": {
+       "content": "<|reserved_special_token_75|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128084": {
+       "content": "<|reserved_special_token_76|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128085": {
+       "content": "<|reserved_special_token_77|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128086": {
+       "content": "<|reserved_special_token_78|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128087": {
+       "content": "<|reserved_special_token_79|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128088": {
+       "content": "<|reserved_special_token_80|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128089": {
+       "content": "<|reserved_special_token_81|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128090": {
+       "content": "<|reserved_special_token_82|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128091": {
+       "content": "<|reserved_special_token_83|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128092": {
+       "content": "<|reserved_special_token_84|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128093": {
+       "content": "<|reserved_special_token_85|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128094": {
+       "content": "<|reserved_special_token_86|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128095": {
+       "content": "<|reserved_special_token_87|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128096": {
+       "content": "<|reserved_special_token_88|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128097": {
+       "content": "<|reserved_special_token_89|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128098": {
+       "content": "<|reserved_special_token_90|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128099": {
+       "content": "<|reserved_special_token_91|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128100": {
+       "content": "<|reserved_special_token_92|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128101": {
+       "content": "<|reserved_special_token_93|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128102": {
+       "content": "<|reserved_special_token_94|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128103": {
+       "content": "<|reserved_special_token_95|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128104": {
+       "content": "<|reserved_special_token_96|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128105": {
+       "content": "<|reserved_special_token_97|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128106": {
+       "content": "<|reserved_special_token_98|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128107": {
+       "content": "<|reserved_special_token_99|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128108": {
+       "content": "<|reserved_special_token_100|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128109": {
+       "content": "<|reserved_special_token_101|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128110": {
+       "content": "<|reserved_special_token_102|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128111": {
+       "content": "<|reserved_special_token_103|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128112": {
+       "content": "<|reserved_special_token_104|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128113": {
+       "content": "<|reserved_special_token_105|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128114": {
+       "content": "<|reserved_special_token_106|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128115": {
+       "content": "<|reserved_special_token_107|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128116": {
+       "content": "<|reserved_special_token_108|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128117": {
+       "content": "<|reserved_special_token_109|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128118": {
+       "content": "<|reserved_special_token_110|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128119": {
+       "content": "<|reserved_special_token_111|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128120": {
+       "content": "<|reserved_special_token_112|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128121": {
+       "content": "<|reserved_special_token_113|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128122": {
+       "content": "<|reserved_special_token_114|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128123": {
+       "content": "<|reserved_special_token_115|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128124": {
+       "content": "<|reserved_special_token_116|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
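The tokenizer configuration above pins the padding setup for this DPO run: the pad token reuses `<|eot_id|>` (the EOS token), padding is applied on the right, and the context limit is 131072 tokens. A minimal sanity check is to load the committed tokenizer and confirm these values; a hedged sketch, assuming a hypothetical hub id derived from the model name (the actual repository path may differ):

```python
from transformers import AutoTokenizer

# Hypothetical repo id taken from the model name; replace with the actual path.
tok = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_rmr1")

# These checks mirror the values in the tokenizer configuration committed above.
assert tok.pad_token == "<|eot_id|>" and tok.eos_token == "<|eot_id|>"
assert tok.padding_side == "right"
assert tok.model_max_length == 131072

# Reserved special tokens should map to their fixed ids without being split.
print(tok.convert_tokens_to_ids("<|reserved_special_token_247|>"))  # expected: 128255
```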
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 1.0,
+ "total_flos": 160669524688896.0,
+ "train_loss": 0.5754778021889021,
+ "train_runtime": 14476.1212,
+ "train_samples_per_second": 6.439,
+ "train_steps_per_second": 0.101
+ }
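train_results.json records the run only in aggregate, but the per-second rates multiply back out to totals that can be cross-checked against the step count in trainer_log.jsonl below. A short sketch, assuming the file is read from the repo root:

```python
import json

# Derive human-readable totals from the aggregate training metrics above.
with open("train_results.json") as f:
    r = json.load(f)

hours = r["train_runtime"] / 3600                             # ~4.02 h
samples = r["train_samples_per_second"] * r["train_runtime"]  # ~93,200 samples in one epoch
steps = r["train_steps_per_second"] * r["train_runtime"]      # ~1462, close to the 1457 steps in trainer_log.jsonl

print(f"{hours:.2f} h, ~{samples:,.0f} samples, ~{steps:.0f} optimizer steps")
```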
trainer_log.jsonl ADDED
@@ -0,0 +1,146 @@
+ {"current_steps": 10, "total_steps": 1457, "loss": 0.6932, "accuracy": 0.4124999940395355, "lr": 3.082191780821918e-08, "epoch": 0.006866363402283066, "percentage": 0.69, "elapsed_time": "0:01:44", "remaining_time": "4:11:05"}
+ {"current_steps": 20, "total_steps": 1457, "loss": 0.6937, "accuracy": 0.515625, "lr": 6.506849315068492e-08, "epoch": 0.013732726804566131, "percentage": 1.37, "elapsed_time": "0:03:27", "remaining_time": "4:08:38"}
+ {"current_steps": 30, "total_steps": 1457, "loss": 0.6923, "accuracy": 0.534375011920929, "lr": 9.931506849315068e-08, "epoch": 0.020599090206849198, "percentage": 2.06, "elapsed_time": "0:05:04", "remaining_time": "4:01:46"}
+ {"current_steps": 40, "total_steps": 1457, "loss": 0.6947, "accuracy": 0.48750001192092896, "lr": 1.3356164383561644e-07, "epoch": 0.027465453609132263, "percentage": 2.75, "elapsed_time": "0:06:43", "remaining_time": "3:57:56"}
+ {"current_steps": 50, "total_steps": 1457, "loss": 0.6941, "accuracy": 0.5234375, "lr": 1.6780821917808218e-07, "epoch": 0.03433181701141533, "percentage": 3.43, "elapsed_time": "0:08:24", "remaining_time": "3:56:26"}
+ {"current_steps": 60, "total_steps": 1457, "loss": 0.6959, "accuracy": 0.4906250238418579, "lr": 2.0205479452054795e-07, "epoch": 0.041198180413698396, "percentage": 4.12, "elapsed_time": "0:10:02", "remaining_time": "3:53:39"}
+ {"current_steps": 70, "total_steps": 1457, "loss": 0.6928, "accuracy": 0.49375003576278687, "lr": 2.363013698630137e-07, "epoch": 0.048064543815981464, "percentage": 4.8, "elapsed_time": "0:11:38", "remaining_time": "3:50:34"}
+ {"current_steps": 80, "total_steps": 1457, "loss": 0.6913, "accuracy": 0.5218750238418579, "lr": 2.7054794520547945e-07, "epoch": 0.054930907218264526, "percentage": 5.49, "elapsed_time": "0:13:20", "remaining_time": "3:49:44"}
+ {"current_steps": 90, "total_steps": 1457, "loss": 0.6931, "accuracy": 0.5218749642372131, "lr": 3.047945205479452e-07, "epoch": 0.061797270620547594, "percentage": 6.18, "elapsed_time": "0:15:02", "remaining_time": "3:48:20"}
+ {"current_steps": 100, "total_steps": 1457, "loss": 0.6899, "accuracy": 0.5281250476837158, "lr": 3.39041095890411e-07, "epoch": 0.06866363402283066, "percentage": 6.86, "elapsed_time": "0:16:39", "remaining_time": "3:46:02"}
+ {"current_steps": 110, "total_steps": 1457, "loss": 0.689, "accuracy": 0.546875, "lr": 3.7328767123287667e-07, "epoch": 0.07552999742511372, "percentage": 7.55, "elapsed_time": "0:18:17", "remaining_time": "3:44:04"}
+ {"current_steps": 120, "total_steps": 1457, "loss": 0.6885, "accuracy": 0.5750000476837158, "lr": 4.0753424657534246e-07, "epoch": 0.08239636082739679, "percentage": 8.24, "elapsed_time": "0:20:04", "remaining_time": "3:43:41"}
+ {"current_steps": 130, "total_steps": 1457, "loss": 0.6842, "accuracy": 0.606249988079071, "lr": 4.417808219178082e-07, "epoch": 0.08926272422967986, "percentage": 8.92, "elapsed_time": "0:21:52", "remaining_time": "3:43:21"}
+ {"current_steps": 140, "total_steps": 1457, "loss": 0.686, "accuracy": 0.5390625596046448, "lr": 4.7602739726027394e-07, "epoch": 0.09612908763196293, "percentage": 9.61, "elapsed_time": "0:23:33", "remaining_time": "3:41:37"}
+ {"current_steps": 150, "total_steps": 1457, "loss": 0.6797, "accuracy": 0.6140625476837158, "lr": 4.988558352402745e-07, "epoch": 0.10299545103424598, "percentage": 10.3, "elapsed_time": "0:25:13", "remaining_time": "3:39:47"}
+ {"current_steps": 160, "total_steps": 1457, "loss": 0.6782, "accuracy": 0.6109375357627869, "lr": 4.950419527078566e-07, "epoch": 0.10986181443652905, "percentage": 10.98, "elapsed_time": "0:26:51", "remaining_time": "3:37:45"}
+ {"current_steps": 170, "total_steps": 1457, "loss": 0.6774, "accuracy": 0.604687511920929, "lr": 4.912280701754385e-07, "epoch": 0.11672817783881212, "percentage": 11.67, "elapsed_time": "0:28:35", "remaining_time": "3:36:29"}
+ {"current_steps": 180, "total_steps": 1457, "loss": 0.67, "accuracy": 0.6343750357627869, "lr": 4.874141876430206e-07, "epoch": 0.12359454124109519, "percentage": 12.35, "elapsed_time": "0:30:13", "remaining_time": "3:34:27"}
+ {"current_steps": 190, "total_steps": 1457, "loss": 0.6723, "accuracy": 0.573437511920929, "lr": 4.836003051106026e-07, "epoch": 0.13046090464337826, "percentage": 13.04, "elapsed_time": "0:31:51", "remaining_time": "3:32:26"}
+ {"current_steps": 200, "total_steps": 1457, "loss": 0.6638, "accuracy": 0.6343750357627869, "lr": 4.797864225781846e-07, "epoch": 0.1373272680456613, "percentage": 13.73, "elapsed_time": "0:33:35", "remaining_time": "3:31:05"}
+ {"current_steps": 210, "total_steps": 1457, "loss": 0.666, "accuracy": 0.6312500238418579, "lr": 4.759725400457666e-07, "epoch": 0.1441936314479444, "percentage": 14.41, "elapsed_time": "0:35:14", "remaining_time": "3:29:16"}
+ {"current_steps": 220, "total_steps": 1457, "loss": 0.6633, "accuracy": 0.6296875476837158, "lr": 4.7215865751334857e-07, "epoch": 0.15105999485022745, "percentage": 15.1, "elapsed_time": "0:36:50", "remaining_time": "3:27:11"}
+ {"current_steps": 230, "total_steps": 1457, "loss": 0.6537, "accuracy": 0.6484375, "lr": 4.6834477498093057e-07, "epoch": 0.1579263582525105, "percentage": 15.79, "elapsed_time": "0:38:27", "remaining_time": "3:25:09"}
+ {"current_steps": 240, "total_steps": 1457, "loss": 0.6514, "accuracy": 0.6343750357627869, "lr": 4.6453089244851257e-07, "epoch": 0.16479272165479358, "percentage": 16.47, "elapsed_time": "0:40:08", "remaining_time": "3:23:32"}
+ {"current_steps": 250, "total_steps": 1457, "loss": 0.6452, "accuracy": 0.6781250238418579, "lr": 4.607170099160946e-07, "epoch": 0.17165908505707664, "percentage": 17.16, "elapsed_time": "0:41:49", "remaining_time": "3:21:57"}
+ {"current_steps": 260, "total_steps": 1457, "loss": 0.6508, "accuracy": 0.6421875357627869, "lr": 4.569031273836766e-07, "epoch": 0.17852544845935972, "percentage": 17.84, "elapsed_time": "0:43:50", "remaining_time": "3:21:50"}
+ {"current_steps": 270, "total_steps": 1457, "loss": 0.6475, "accuracy": 0.6484375, "lr": 4.5308924485125853e-07, "epoch": 0.18539181186164277, "percentage": 18.53, "elapsed_time": "0:45:30", "remaining_time": "3:20:04"}
+ {"current_steps": 280, "total_steps": 1457, "loss": 0.6457, "accuracy": 0.6656250357627869, "lr": 4.4927536231884053e-07, "epoch": 0.19225817526392586, "percentage": 19.22, "elapsed_time": "0:47:10", "remaining_time": "3:18:17"}
+ {"current_steps": 290, "total_steps": 1457, "loss": 0.6373, "accuracy": 0.6484375596046448, "lr": 4.454614797864226e-07, "epoch": 0.1991245386662089, "percentage": 19.9, "elapsed_time": "0:48:46", "remaining_time": "3:16:18"}
+ {"current_steps": 300, "total_steps": 1457, "loss": 0.629, "accuracy": 0.703125, "lr": 4.416475972540046e-07, "epoch": 0.20599090206849197, "percentage": 20.59, "elapsed_time": "0:50:26", "remaining_time": "3:14:31"}
+ {"current_steps": 310, "total_steps": 1457, "loss": 0.6295, "accuracy": 0.6859375238418579, "lr": 4.3783371472158654e-07, "epoch": 0.21285726547077505, "percentage": 21.28, "elapsed_time": "0:52:06", "remaining_time": "3:12:46"}
+ {"current_steps": 320, "total_steps": 1457, "loss": 0.6173, "accuracy": 0.671875, "lr": 4.3401983218916855e-07, "epoch": 0.2197236288730581, "percentage": 21.96, "elapsed_time": "0:53:47", "remaining_time": "3:11:09"}
+ {"current_steps": 330, "total_steps": 1457, "loss": 0.6263, "accuracy": 0.6734375357627869, "lr": 4.3020594965675055e-07, "epoch": 0.22658999227534118, "percentage": 22.65, "elapsed_time": "0:55:25", "remaining_time": "3:09:16"}
+ {"current_steps": 340, "total_steps": 1457, "loss": 0.6296, "accuracy": 0.6640625, "lr": 4.2639206712433255e-07, "epoch": 0.23345635567762424, "percentage": 23.34, "elapsed_time": "0:57:05", "remaining_time": "3:07:34"}
+ {"current_steps": 350, "total_steps": 1457, "loss": 0.633, "accuracy": 0.6625000238418579, "lr": 4.2257818459191456e-07, "epoch": 0.2403227190799073, "percentage": 24.02, "elapsed_time": "0:58:45", "remaining_time": "3:05:50"}
+ {"current_steps": 360, "total_steps": 1457, "loss": 0.6223, "accuracy": 0.690625011920929, "lr": 4.187643020594965e-07, "epoch": 0.24718908248219038, "percentage": 24.71, "elapsed_time": "1:00:24", "remaining_time": "3:04:04"}
+ {"current_steps": 370, "total_steps": 1457, "loss": 0.6137, "accuracy": 0.6890624761581421, "lr": 4.1495041952707856e-07, "epoch": 0.25405544588447343, "percentage": 25.39, "elapsed_time": "1:02:01", "remaining_time": "3:02:13"}
+ {"current_steps": 380, "total_steps": 1457, "loss": 0.6003, "accuracy": 0.7046874761581421, "lr": 4.1113653699466057e-07, "epoch": 0.2609218092867565, "percentage": 26.08, "elapsed_time": "1:03:42", "remaining_time": "3:00:32"}
+ {"current_steps": 390, "total_steps": 1457, "loss": 0.613, "accuracy": 0.6781250238418579, "lr": 4.0732265446224257e-07, "epoch": 0.2677881726890396, "percentage": 26.77, "elapsed_time": "1:05:24", "remaining_time": "2:58:56"}
+ {"current_steps": 400, "total_steps": 1457, "loss": 0.601, "accuracy": 0.7109375, "lr": 4.035087719298245e-07, "epoch": 0.2746545360913226, "percentage": 27.45, "elapsed_time": "1:07:06", "remaining_time": "2:57:19"}
+ {"current_steps": 410, "total_steps": 1457, "loss": 0.5947, "accuracy": 0.703125, "lr": 3.996948893974065e-07, "epoch": 0.2815208994936057, "percentage": 28.14, "elapsed_time": "1:08:42", "remaining_time": "2:55:27"}
+ {"current_steps": 420, "total_steps": 1457, "loss": 0.609, "accuracy": 0.6843750476837158, "lr": 3.9588100686498853e-07, "epoch": 0.2883872628958888, "percentage": 28.83, "elapsed_time": "1:10:22", "remaining_time": "2:53:45"}
+ {"current_steps": 430, "total_steps": 1457, "loss": 0.6196, "accuracy": 0.671875, "lr": 3.9206712433257053e-07, "epoch": 0.2952536262981718, "percentage": 29.51, "elapsed_time": "1:12:06", "remaining_time": "2:52:12"}
+ {"current_steps": 440, "total_steps": 1457, "loss": 0.5946, "accuracy": 0.6937500238418579, "lr": 3.882532418001526e-07, "epoch": 0.3021199897004549, "percentage": 30.2, "elapsed_time": "1:13:44", "remaining_time": "2:50:26"}
+ {"current_steps": 450, "total_steps": 1457, "loss": 0.6148, "accuracy": 0.684374988079071, "lr": 3.8443935926773454e-07, "epoch": 0.308986353102738, "percentage": 30.89, "elapsed_time": "1:15:24", "remaining_time": "2:48:45"}
+ {"current_steps": 460, "total_steps": 1457, "loss": 0.5991, "accuracy": 0.682812511920929, "lr": 3.8062547673531654e-07, "epoch": 0.315852716505021, "percentage": 31.57, "elapsed_time": "1:17:04", "remaining_time": "2:47:03"}
+ {"current_steps": 470, "total_steps": 1457, "loss": 0.61, "accuracy": 0.6734375357627869, "lr": 3.7681159420289855e-07, "epoch": 0.3227190799073041, "percentage": 32.26, "elapsed_time": "1:18:40", "remaining_time": "2:45:13"}
+ {"current_steps": 480, "total_steps": 1457, "loss": 0.5981, "accuracy": 0.6875000596046448, "lr": 3.7299771167048055e-07, "epoch": 0.32958544330958717, "percentage": 32.94, "elapsed_time": "1:20:13", "remaining_time": "2:43:18"}
+ {"current_steps": 490, "total_steps": 1457, "loss": 0.5837, "accuracy": 0.7250000238418579, "lr": 3.691838291380625e-07, "epoch": 0.33645180671187025, "percentage": 33.63, "elapsed_time": "1:22:00", "remaining_time": "2:41:50"}
+ {"current_steps": 500, "total_steps": 1457, "loss": 0.5909, "accuracy": 0.703125, "lr": 3.653699466056445e-07, "epoch": 0.3433181701141533, "percentage": 34.32, "elapsed_time": "1:23:34", "remaining_time": "2:39:57"}
+ {"current_steps": 510, "total_steps": 1457, "loss": 0.584, "accuracy": 0.7109375, "lr": 3.615560640732265e-07, "epoch": 0.35018453351643636, "percentage": 35.0, "elapsed_time": "1:25:29", "remaining_time": "2:38:44"}
+ {"current_steps": 520, "total_steps": 1457, "loss": 0.5673, "accuracy": 0.7250000238418579, "lr": 3.5774218154080856e-07, "epoch": 0.35705089691871944, "percentage": 35.69, "elapsed_time": "1:27:10", "remaining_time": "2:37:05"}
+ {"current_steps": 530, "total_steps": 1457, "loss": 0.5954, "accuracy": 0.684374988079071, "lr": 3.5392829900839057e-07, "epoch": 0.36391726032100247, "percentage": 36.38, "elapsed_time": "1:28:49", "remaining_time": "2:35:22"}
+ {"current_steps": 540, "total_steps": 1457, "loss": 0.5762, "accuracy": 0.7000000476837158, "lr": 3.501144164759725e-07, "epoch": 0.37078362372328555, "percentage": 37.06, "elapsed_time": "1:30:24", "remaining_time": "2:33:32"}
+ {"current_steps": 550, "total_steps": 1457, "loss": 0.563, "accuracy": 0.7484375238418579, "lr": 3.463005339435545e-07, "epoch": 0.37764998712556863, "percentage": 37.75, "elapsed_time": "1:32:02", "remaining_time": "2:31:47"}
+ {"current_steps": 560, "total_steps": 1457, "loss": 0.5741, "accuracy": 0.699999988079071, "lr": 3.424866514111365e-07, "epoch": 0.3845163505278517, "percentage": 38.44, "elapsed_time": "1:33:41", "remaining_time": "2:30:04"}
+ {"current_steps": 570, "total_steps": 1457, "loss": 0.562, "accuracy": 0.714062511920929, "lr": 3.3867276887871853e-07, "epoch": 0.39138271393013474, "percentage": 39.12, "elapsed_time": "1:35:21", "remaining_time": "2:28:23"}
+ {"current_steps": 580, "total_steps": 1457, "loss": 0.5746, "accuracy": 0.7046874761581421, "lr": 3.348588863463005e-07, "epoch": 0.3982490773324178, "percentage": 39.81, "elapsed_time": "1:36:59", "remaining_time": "2:26:40"}
+ {"current_steps": 590, "total_steps": 1457, "loss": 0.574, "accuracy": 0.7124999761581421, "lr": 3.310450038138825e-07, "epoch": 0.4051154407347009, "percentage": 40.49, "elapsed_time": "1:38:36", "remaining_time": "2:24:54"}
+ {"current_steps": 600, "total_steps": 1457, "loss": 0.5756, "accuracy": 0.7046875357627869, "lr": 3.2723112128146454e-07, "epoch": 0.41198180413698393, "percentage": 41.18, "elapsed_time": "1:40:17", "remaining_time": "2:23:14"}
+ {"current_steps": 610, "total_steps": 1457, "loss": 0.5566, "accuracy": 0.7421875, "lr": 3.2341723874904654e-07, "epoch": 0.418848167539267, "percentage": 41.87, "elapsed_time": "1:41:54", "remaining_time": "2:21:30"}
+ {"current_steps": 620, "total_steps": 1457, "loss": 0.5954, "accuracy": 0.667187511920929, "lr": 3.1960335621662854e-07, "epoch": 0.4257145309415501, "percentage": 42.55, "elapsed_time": "1:43:29", "remaining_time": "2:19:42"}
+ {"current_steps": 630, "total_steps": 1457, "loss": 0.5581, "accuracy": 0.7390625476837158, "lr": 3.157894736842105e-07, "epoch": 0.4325808943438331, "percentage": 43.24, "elapsed_time": "1:45:08", "remaining_time": "2:18:01"}
+ {"current_steps": 640, "total_steps": 1457, "loss": 0.547, "accuracy": 0.75, "lr": 3.119755911517925e-07, "epoch": 0.4394472577461162, "percentage": 43.93, "elapsed_time": "1:46:46", "remaining_time": "2:16:18"}
+ {"current_steps": 650, "total_steps": 1457, "loss": 0.5785, "accuracy": 0.7093749642372131, "lr": 3.081617086193745e-07, "epoch": 0.4463136211483993, "percentage": 44.61, "elapsed_time": "1:48:24", "remaining_time": "2:14:35"}
+ {"current_steps": 660, "total_steps": 1457, "loss": 0.5592, "accuracy": 0.7406249642372131, "lr": 3.043478260869565e-07, "epoch": 0.45317998455068237, "percentage": 45.3, "elapsed_time": "1:50:07", "remaining_time": "2:12:59"}
+ {"current_steps": 670, "total_steps": 1457, "loss": 0.5636, "accuracy": 0.7359375357627869, "lr": 3.005339435545385e-07, "epoch": 0.4600463479529654, "percentage": 45.98, "elapsed_time": "1:51:43", "remaining_time": "2:11:14"}
+ {"current_steps": 680, "total_steps": 1457, "loss": 0.5671, "accuracy": 0.7421875, "lr": 2.967200610221205e-07, "epoch": 0.4669127113552485, "percentage": 46.67, "elapsed_time": "1:53:21", "remaining_time": "2:09:31"}
+ {"current_steps": 690, "total_steps": 1457, "loss": 0.5621, "accuracy": 0.7234375476837158, "lr": 2.929061784897025e-07, "epoch": 0.47377907475753156, "percentage": 47.36, "elapsed_time": "1:55:03", "remaining_time": "2:07:53"}
+ {"current_steps": 700, "total_steps": 1457, "loss": 0.5599, "accuracy": 0.703125, "lr": 2.890922959572845e-07, "epoch": 0.4806454381598146, "percentage": 48.04, "elapsed_time": "1:56:43", "remaining_time": "2:06:13"}
+ {"current_steps": 710, "total_steps": 1457, "loss": 0.5553, "accuracy": 0.7093750238418579, "lr": 2.852784134248665e-07, "epoch": 0.48751180156209767, "percentage": 48.73, "elapsed_time": "1:58:21", "remaining_time": "2:04:31"}
+ {"current_steps": 720, "total_steps": 1457, "loss": 0.5597, "accuracy": 0.7156250476837158, "lr": 2.8146453089244847e-07, "epoch": 0.49437816496438075, "percentage": 49.42, "elapsed_time": "1:59:55", "remaining_time": "2:02:45"}
+ {"current_steps": 730, "total_steps": 1457, "loss": 0.5693, "accuracy": 0.7124999761581421, "lr": 2.776506483600305e-07, "epoch": 0.5012445283666638, "percentage": 50.1, "elapsed_time": "2:01:32", "remaining_time": "2:01:02"}
+ {"current_steps": 740, "total_steps": 1457, "loss": 0.5536, "accuracy": 0.7328125238418579, "lr": 2.738367658276125e-07, "epoch": 0.5081108917689469, "percentage": 50.79, "elapsed_time": "2:03:10", "remaining_time": "1:59:20"}
+ {"current_steps": 750, "total_steps": 1457, "loss": 0.5538, "accuracy": 0.7281250357627869, "lr": 2.7002288329519454e-07, "epoch": 0.5149772551712299, "percentage": 51.48, "elapsed_time": "2:04:44", "remaining_time": "1:57:35"}
+ {"current_steps": 760, "total_steps": 1457, "loss": 0.5472, "accuracy": 0.7265625, "lr": 2.662090007627765e-07, "epoch": 0.521843618573513, "percentage": 52.16, "elapsed_time": "2:06:42", "remaining_time": "1:56:12"}
+ {"current_steps": 770, "total_steps": 1457, "loss": 0.5449, "accuracy": 0.731249988079071, "lr": 2.623951182303585e-07, "epoch": 0.5287099819757961, "percentage": 52.85, "elapsed_time": "2:08:23", "remaining_time": "1:54:33"}
+ {"current_steps": 780, "total_steps": 1457, "loss": 0.5401, "accuracy": 0.7203124761581421, "lr": 2.585812356979405e-07, "epoch": 0.5355763453780792, "percentage": 53.53, "elapsed_time": "2:10:00", "remaining_time": "1:52:50"}
+ {"current_steps": 790, "total_steps": 1457, "loss": 0.5675, "accuracy": 0.7015624642372131, "lr": 2.547673531655225e-07, "epoch": 0.5424427087803622, "percentage": 54.22, "elapsed_time": "2:11:39", "remaining_time": "1:51:09"}
+ {"current_steps": 800, "total_steps": 1457, "loss": 0.5502, "accuracy": 0.7374999523162842, "lr": 2.509534706331045e-07, "epoch": 0.5493090721826452, "percentage": 54.91, "elapsed_time": "2:13:26", "remaining_time": "1:49:35"}
+ {"current_steps": 810, "total_steps": 1457, "loss": 0.541, "accuracy": 0.7515624761581421, "lr": 2.471395881006865e-07, "epoch": 0.5561754355849283, "percentage": 55.59, "elapsed_time": "2:15:06", "remaining_time": "1:47:54"}
+ {"current_steps": 820, "total_steps": 1457, "loss": 0.5414, "accuracy": 0.7390625476837158, "lr": 2.4332570556826846e-07, "epoch": 0.5630417989872114, "percentage": 56.28, "elapsed_time": "2:16:43", "remaining_time": "1:46:12"}
+ {"current_steps": 830, "total_steps": 1457, "loss": 0.5367, "accuracy": 0.7578125, "lr": 2.395118230358505e-07, "epoch": 0.5699081623894945, "percentage": 56.97, "elapsed_time": "2:18:17", "remaining_time": "1:44:28"}
+ {"current_steps": 840, "total_steps": 1457, "loss": 0.5326, "accuracy": 0.7593749761581421, "lr": 2.3569794050343246e-07, "epoch": 0.5767745257917776, "percentage": 57.65, "elapsed_time": "2:19:56", "remaining_time": "1:42:47"}
+ {"current_steps": 850, "total_steps": 1457, "loss": 0.5613, "accuracy": 0.7171875238418579, "lr": 2.318840579710145e-07, "epoch": 0.5836408891940605, "percentage": 58.34, "elapsed_time": "2:21:31", "remaining_time": "1:41:03"}
+ {"current_steps": 860, "total_steps": 1457, "loss": 0.5404, "accuracy": 0.7421875, "lr": 2.2807017543859647e-07, "epoch": 0.5905072525963436, "percentage": 59.03, "elapsed_time": "2:23:04", "remaining_time": "1:39:19"}
+ {"current_steps": 870, "total_steps": 1457, "loss": 0.5283, "accuracy": 0.7718750238418579, "lr": 2.2425629290617847e-07, "epoch": 0.5973736159986267, "percentage": 59.71, "elapsed_time": "2:24:43", "remaining_time": "1:37:38"}
+ {"current_steps": 880, "total_steps": 1457, "loss": 0.5364, "accuracy": 0.7265625596046448, "lr": 2.204424103737605e-07, "epoch": 0.6042399794009098, "percentage": 60.4, "elapsed_time": "2:26:20", "remaining_time": "1:35:57"}
+ {"current_steps": 890, "total_steps": 1457, "loss": 0.5617, "accuracy": 0.7203125357627869, "lr": 2.1662852784134248e-07, "epoch": 0.6111063428031929, "percentage": 61.08, "elapsed_time": "2:27:55", "remaining_time": "1:34:14"}
+ {"current_steps": 900, "total_steps": 1457, "loss": 0.5388, "accuracy": 0.753125011920929, "lr": 2.1281464530892448e-07, "epoch": 0.617972706205476, "percentage": 61.77, "elapsed_time": "2:29:35", "remaining_time": "1:32:34"}
+ {"current_steps": 910, "total_steps": 1457, "loss": 0.538, "accuracy": 0.721875011920929, "lr": 2.0900076277650646e-07, "epoch": 0.624839069607759, "percentage": 62.46, "elapsed_time": "2:31:14", "remaining_time": "1:30:54"}
+ {"current_steps": 920, "total_steps": 1457, "loss": 0.5427, "accuracy": 0.7312500476837158, "lr": 2.051868802440885e-07, "epoch": 0.631705433010042, "percentage": 63.14, "elapsed_time": "2:32:56", "remaining_time": "1:29:16"}
+ {"current_steps": 930, "total_steps": 1457, "loss": 0.5025, "accuracy": 0.78125, "lr": 2.0137299771167047e-07, "epoch": 0.6385717964123251, "percentage": 63.83, "elapsed_time": "2:34:35", "remaining_time": "1:27:36"}
+ {"current_steps": 940, "total_steps": 1457, "loss": 0.5416, "accuracy": 0.7156250476837158, "lr": 1.9755911517925247e-07, "epoch": 0.6454381598146082, "percentage": 64.52, "elapsed_time": "2:36:15", "remaining_time": "1:25:56"}
+ {"current_steps": 950, "total_steps": 1457, "loss": 0.5358, "accuracy": 0.7281249761581421, "lr": 1.9374523264683445e-07, "epoch": 0.6523045232168913, "percentage": 65.2, "elapsed_time": "2:37:52", "remaining_time": "1:24:15"}
+ {"current_steps": 960, "total_steps": 1457, "loss": 0.5448, "accuracy": 0.734375, "lr": 1.8993135011441648e-07, "epoch": 0.6591708866191743, "percentage": 65.89, "elapsed_time": "2:39:28", "remaining_time": "1:22:33"}
+ {"current_steps": 970, "total_steps": 1457, "loss": 0.5278, "accuracy": 0.75, "lr": 1.8611746758199848e-07, "epoch": 0.6660372500214574, "percentage": 66.58, "elapsed_time": "2:41:01", "remaining_time": "1:20:50"}
+ {"current_steps": 980, "total_steps": 1457, "loss": 0.5325, "accuracy": 0.7718750238418579, "lr": 1.8230358504958046e-07, "epoch": 0.6729036134237405, "percentage": 67.26, "elapsed_time": "2:42:41", "remaining_time": "1:19:11"}
+ {"current_steps": 990, "total_steps": 1457, "loss": 0.5553, "accuracy": 0.746874988079071, "lr": 1.7848970251716246e-07, "epoch": 0.6797699768260235, "percentage": 67.95, "elapsed_time": "2:44:15", "remaining_time": "1:17:28"}
+ {"current_steps": 1000, "total_steps": 1457, "loss": 0.5147, "accuracy": 0.760937511920929, "lr": 1.7467581998474446e-07, "epoch": 0.6866363402283066, "percentage": 68.63, "elapsed_time": "2:45:55", "remaining_time": "1:15:49"}
+ {"current_steps": 1010, "total_steps": 1457, "loss": 0.5326, "accuracy": 0.7406250238418579, "lr": 1.7086193745232647e-07, "epoch": 0.6935027036305896, "percentage": 69.32, "elapsed_time": "2:47:51", "remaining_time": "1:14:17"}
+ {"current_steps": 1020, "total_steps": 1457, "loss": 0.5303, "accuracy": 0.7578125596046448, "lr": 1.6704805491990844e-07, "epoch": 0.7003690670328727, "percentage": 70.01, "elapsed_time": "2:49:31", "remaining_time": "1:12:37"}
+ {"current_steps": 1030, "total_steps": 1457, "loss": 0.5397, "accuracy": 0.7359375357627869, "lr": 1.6323417238749045e-07, "epoch": 0.7072354304351558, "percentage": 70.69, "elapsed_time": "2:51:07", "remaining_time": "1:10:56"}
+ {"current_steps": 1040, "total_steps": 1457, "loss": 0.5401, "accuracy": 0.7515625357627869, "lr": 1.5942028985507245e-07, "epoch": 0.7141017938374389, "percentage": 71.38, "elapsed_time": "2:52:44", "remaining_time": "1:09:15"}
+ {"current_steps": 1050, "total_steps": 1457, "loss": 0.5287, "accuracy": 0.7562500238418579, "lr": 1.5560640732265446e-07, "epoch": 0.720968157239722, "percentage": 72.07, "elapsed_time": "2:54:22", "remaining_time": "1:07:35"}
+ {"current_steps": 1060, "total_steps": 1457, "loss": 0.5515, "accuracy": 0.7124999761581421, "lr": 1.5179252479023646e-07, "epoch": 0.7278345206420049, "percentage": 72.75, "elapsed_time": "2:55:59", "remaining_time": "1:05:54"}
+ {"current_steps": 1070, "total_steps": 1457, "loss": 0.5169, "accuracy": 0.7484375238418579, "lr": 1.4797864225781844e-07, "epoch": 0.734700884044288, "percentage": 73.44, "elapsed_time": "2:57:37", "remaining_time": "1:04:14"}
+ {"current_steps": 1080, "total_steps": 1457, "loss": 0.5423, "accuracy": 0.737500011920929, "lr": 1.4416475972540047e-07, "epoch": 0.7415672474465711, "percentage": 74.12, "elapsed_time": "2:59:14", "remaining_time": "1:02:34"}
109
+ {"current_steps": 1090, "total_steps": 1457, "loss": 0.5251, "accuracy": 0.776562511920929, "lr": 1.4035087719298244e-07, "epoch": 0.7484336108488542, "percentage": 74.81, "elapsed_time": "3:00:50", "remaining_time": "1:00:53"}
110
+ {"current_steps": 1100, "total_steps": 1457, "loss": 0.4939, "accuracy": 0.7640625238418579, "lr": 1.3653699466056445e-07, "epoch": 0.7552999742511373, "percentage": 75.5, "elapsed_time": "3:02:32", "remaining_time": "0:59:14"}
111
+ {"current_steps": 1110, "total_steps": 1457, "loss": 0.5141, "accuracy": 0.7546875476837158, "lr": 1.3272311212814645e-07, "epoch": 0.7621663376534203, "percentage": 76.18, "elapsed_time": "3:04:06", "remaining_time": "0:57:33"}
112
+ {"current_steps": 1120, "total_steps": 1457, "loss": 0.5309, "accuracy": 0.7312500476837158, "lr": 1.2890922959572845e-07, "epoch": 0.7690327010557034, "percentage": 76.87, "elapsed_time": "3:05:43", "remaining_time": "0:55:52"}
113
+ {"current_steps": 1130, "total_steps": 1457, "loss": 0.5151, "accuracy": 0.7734375, "lr": 1.2509534706331046e-07, "epoch": 0.7758990644579864, "percentage": 77.56, "elapsed_time": "3:07:22", "remaining_time": "0:54:13"}
114
+ {"current_steps": 1140, "total_steps": 1457, "loss": 0.5208, "accuracy": 0.7578125596046448, "lr": 1.2128146453089243e-07, "epoch": 0.7827654278602695, "percentage": 78.24, "elapsed_time": "3:08:54", "remaining_time": "0:52:31"}
115
+ {"current_steps": 1150, "total_steps": 1457, "loss": 0.52, "accuracy": 0.7562500238418579, "lr": 1.1746758199847444e-07, "epoch": 0.7896317912625526, "percentage": 78.93, "elapsed_time": "3:10:36", "remaining_time": "0:50:52"}
116
+ {"current_steps": 1160, "total_steps": 1457, "loss": 0.5272, "accuracy": 0.7500000596046448, "lr": 1.1365369946605644e-07, "epoch": 0.7964981546648356, "percentage": 79.62, "elapsed_time": "3:12:12", "remaining_time": "0:49:12"}
117
+ {"current_steps": 1170, "total_steps": 1457, "loss": 0.5052, "accuracy": 0.760937511920929, "lr": 1.0983981693363843e-07, "epoch": 0.8033645180671187, "percentage": 80.3, "elapsed_time": "3:13:46", "remaining_time": "0:47:31"}
118
+ {"current_steps": 1180, "total_steps": 1457, "loss": 0.5308, "accuracy": 0.7718750238418579, "lr": 1.0602593440122045e-07, "epoch": 0.8102308814694018, "percentage": 80.99, "elapsed_time": "3:15:24", "remaining_time": "0:45:52"}
119
+ {"current_steps": 1190, "total_steps": 1457, "loss": 0.5198, "accuracy": 0.7578125, "lr": 1.0221205186880244e-07, "epoch": 0.8170972448716848, "percentage": 81.67, "elapsed_time": "3:16:59", "remaining_time": "0:44:12"}
120
+ {"current_steps": 1200, "total_steps": 1457, "loss": 0.5135, "accuracy": 0.7640625238418579, "lr": 9.839816933638444e-08, "epoch": 0.8239636082739679, "percentage": 82.36, "elapsed_time": "3:18:35", "remaining_time": "0:42:31"}
121
+ {"current_steps": 1210, "total_steps": 1457, "loss": 0.5341, "accuracy": 0.7359374761581421, "lr": 9.458428680396643e-08, "epoch": 0.830829971676251, "percentage": 83.05, "elapsed_time": "3:20:18", "remaining_time": "0:40:53"}
122
+ {"current_steps": 1220, "total_steps": 1457, "loss": 0.51, "accuracy": 0.762499988079071, "lr": 9.077040427154843e-08, "epoch": 0.837696335078534, "percentage": 83.73, "elapsed_time": "3:21:57", "remaining_time": "0:39:14"}
123
+ {"current_steps": 1230, "total_steps": 1457, "loss": 0.5175, "accuracy": 0.765625, "lr": 8.695652173913042e-08, "epoch": 0.8445626984808171, "percentage": 84.42, "elapsed_time": "3:23:35", "remaining_time": "0:37:34"}
124
+ {"current_steps": 1240, "total_steps": 1457, "loss": 0.5419, "accuracy": 0.7515625357627869, "lr": 8.314263920671243e-08, "epoch": 0.8514290618831002, "percentage": 85.11, "elapsed_time": "3:25:11", "remaining_time": "0:35:54"}
125
+ {"current_steps": 1250, "total_steps": 1457, "loss": 0.5386, "accuracy": 0.7484375238418579, "lr": 7.932875667429442e-08, "epoch": 0.8582954252853833, "percentage": 85.79, "elapsed_time": "3:26:53", "remaining_time": "0:34:15"}
126
+ {"current_steps": 1260, "total_steps": 1457, "loss": 0.5072, "accuracy": 0.7562500238418579, "lr": 7.551487414187643e-08, "epoch": 0.8651617886876662, "percentage": 86.48, "elapsed_time": "3:28:55", "remaining_time": "0:32:39"}
127
+ {"current_steps": 1270, "total_steps": 1457, "loss": 0.5378, "accuracy": 0.7171875238418579, "lr": 7.170099160945843e-08, "epoch": 0.8720281520899493, "percentage": 87.17, "elapsed_time": "3:30:35", "remaining_time": "0:31:00"}
128
+ {"current_steps": 1280, "total_steps": 1457, "loss": 0.5169, "accuracy": 0.7468750476837158, "lr": 6.788710907704043e-08, "epoch": 0.8788945154922324, "percentage": 87.85, "elapsed_time": "3:32:15", "remaining_time": "0:29:21"}
129
+ {"current_steps": 1290, "total_steps": 1457, "loss": 0.4972, "accuracy": 0.7906249761581421, "lr": 6.407322654462242e-08, "epoch": 0.8857608788945155, "percentage": 88.54, "elapsed_time": "3:33:54", "remaining_time": "0:27:41"}
130
+ {"current_steps": 1300, "total_steps": 1457, "loss": 0.5156, "accuracy": 0.7640625238418579, "lr": 6.025934401220442e-08, "epoch": 0.8926272422967986, "percentage": 89.22, "elapsed_time": "3:35:33", "remaining_time": "0:26:01"}
131
+ {"current_steps": 1310, "total_steps": 1457, "loss": 0.517, "accuracy": 0.760937511920929, "lr": 5.644546147978642e-08, "epoch": 0.8994936056990817, "percentage": 89.91, "elapsed_time": "3:37:07", "remaining_time": "0:24:21"}
132
+ {"current_steps": 1320, "total_steps": 1457, "loss": 0.5129, "accuracy": 0.768750011920929, "lr": 5.2631578947368416e-08, "epoch": 0.9063599691013647, "percentage": 90.6, "elapsed_time": "3:38:44", "remaining_time": "0:22:42"}
133
+ {"current_steps": 1330, "total_steps": 1457, "loss": 0.518, "accuracy": 0.7562500238418579, "lr": 4.881769641495042e-08, "epoch": 0.9132263325036477, "percentage": 91.28, "elapsed_time": "3:40:25", "remaining_time": "0:21:02"}
134
+ {"current_steps": 1340, "total_steps": 1457, "loss": 0.5245, "accuracy": 0.754687488079071, "lr": 4.5003813882532416e-08, "epoch": 0.9200926959059308, "percentage": 91.97, "elapsed_time": "3:41:57", "remaining_time": "0:19:22"}
135
+ {"current_steps": 1350, "total_steps": 1457, "loss": 0.5043, "accuracy": 0.7421875, "lr": 4.118993135011441e-08, "epoch": 0.9269590593082139, "percentage": 92.66, "elapsed_time": "3:43:34", "remaining_time": "0:17:43"}
136
+ {"current_steps": 1360, "total_steps": 1457, "loss": 0.5196, "accuracy": 0.754687488079071, "lr": 3.737604881769641e-08, "epoch": 0.933825422710497, "percentage": 93.34, "elapsed_time": "3:45:12", "remaining_time": "0:16:03"}
137
+ {"current_steps": 1370, "total_steps": 1457, "loss": 0.5305, "accuracy": 0.745312511920929, "lr": 3.356216628527841e-08, "epoch": 0.94069178611278, "percentage": 94.03, "elapsed_time": "3:46:51", "remaining_time": "0:14:24"}
138
+ {"current_steps": 1380, "total_steps": 1457, "loss": 0.4831, "accuracy": 0.792187511920929, "lr": 2.9748283752860413e-08, "epoch": 0.9475581495150631, "percentage": 94.72, "elapsed_time": "3:48:29", "remaining_time": "0:12:44"}
139
+ {"current_steps": 1390, "total_steps": 1457, "loss": 0.491, "accuracy": 0.776562511920929, "lr": 2.593440122044241e-08, "epoch": 0.9544245129173462, "percentage": 95.4, "elapsed_time": "3:50:08", "remaining_time": "0:11:05"}
140
+ {"current_steps": 1400, "total_steps": 1457, "loss": 0.521, "accuracy": 0.7421875, "lr": 2.212051868802441e-08, "epoch": 0.9612908763196292, "percentage": 96.09, "elapsed_time": "3:51:44", "remaining_time": "0:09:26"}
141
+ {"current_steps": 1410, "total_steps": 1457, "loss": 0.5076, "accuracy": 0.7562500238418579, "lr": 1.8306636155606407e-08, "epoch": 0.9681572397219123, "percentage": 96.77, "elapsed_time": "3:53:23", "remaining_time": "0:07:46"}
142
+ {"current_steps": 1420, "total_steps": 1457, "loss": 0.5287, "accuracy": 0.7593750357627869, "lr": 1.4492753623188406e-08, "epoch": 0.9750236031241953, "percentage": 97.46, "elapsed_time": "3:54:59", "remaining_time": "0:06:07"}
143
+ {"current_steps": 1430, "total_steps": 1457, "loss": 0.5281, "accuracy": 0.75, "lr": 1.0678871090770404e-08, "epoch": 0.9818899665264784, "percentage": 98.15, "elapsed_time": "3:56:40", "remaining_time": "0:04:28"}
144
+ {"current_steps": 1440, "total_steps": 1457, "loss": 0.5302, "accuracy": 0.7250000238418579, "lr": 6.864988558352402e-09, "epoch": 0.9887563299287615, "percentage": 98.83, "elapsed_time": "3:58:16", "remaining_time": "0:02:48"}
145
+ {"current_steps": 1450, "total_steps": 1457, "loss": 0.518, "accuracy": 0.7562500238418579, "lr": 3.0511060259344012e-09, "epoch": 0.9956226933310446, "percentage": 99.52, "elapsed_time": "3:59:53", "remaining_time": "0:01:09"}
146
+ {"current_steps": 1457, "total_steps": 1457, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "4:01:16", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2218 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 1457,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.006866363402283066,
+ "grad_norm": 40.24618346360798,
+ "learning_rate": 3.082191780821918e-08,
+ "logits/chosen": 0.42846742272377014,
+ "logits/rejected": 0.45268937945365906,
+ "logps/chosen": -183.30755615234375,
+ "logps/rejected": -195.22784423828125,
+ "loss": 0.6932,
+ "rewards/accuracies": 0.4124999940395355,
+ "rewards/chosen": 0.0004921938525512815,
+ "rewards/margins": 0.0011830208823084831,
+ "rewards/rejected": -0.0006908266805112362,
+ "step": 10
+ },
+ {
+ "epoch": 0.013732726804566131,
+ "grad_norm": 42.45940511830856,
+ "learning_rate": 6.506849315068492e-08,
+ "logits/chosen": 0.378580778837204,
+ "logits/rejected": 0.34625354409217834,
+ "logps/chosen": -184.8052978515625,
+ "logps/rejected": -188.45614624023438,
+ "loss": 0.6937,
+ "rewards/accuracies": 0.515625,
+ "rewards/chosen": -0.00026374688604846597,
+ "rewards/margins": 0.0003242385573685169,
+ "rewards/rejected": -0.0005879853270016611,
+ "step": 20
+ },
+ {
+ "epoch": 0.020599090206849198,
+ "grad_norm": 41.66964168493646,
+ "learning_rate": 9.931506849315068e-08,
+ "logits/chosen": 0.35301661491394043,
+ "logits/rejected": 0.4009673595428467,
+ "logps/chosen": -192.31698608398438,
+ "logps/rejected": -194.3849639892578,
+ "loss": 0.6923,
+ "rewards/accuracies": 0.534375011920929,
+ "rewards/chosen": -0.0016289607156068087,
+ "rewards/margins": 0.0033018598333001137,
+ "rewards/rejected": -0.004930821247398853,
+ "step": 30
+ },
+ {
+ "epoch": 0.027465453609132263,
+ "grad_norm": 39.249039173578126,
+ "learning_rate": 1.3356164383561644e-07,
+ "logits/chosen": 0.31915441155433655,
+ "logits/rejected": 0.3933267593383789,
+ "logps/chosen": -197.9785614013672,
+ "logps/rejected": -193.6060333251953,
+ "loss": 0.6947,
+ "rewards/accuracies": 0.48750001192092896,
+ "rewards/chosen": 0.0002039932005573064,
+ "rewards/margins": -0.0016961463261395693,
+ "rewards/rejected": 0.0019001394975930452,
+ "step": 40
+ },
+ {
+ "epoch": 0.03433181701141533,
+ "grad_norm": 40.90604754389793,
+ "learning_rate": 1.6780821917808218e-07,
+ "logits/chosen": 0.35795748233795166,
+ "logits/rejected": 0.4378845691680908,
+ "logps/chosen": -187.54156494140625,
+ "logps/rejected": -189.99392700195312,
+ "loss": 0.6941,
+ "rewards/accuracies": 0.5234375,
+ "rewards/chosen": 0.0017753265565261245,
+ "rewards/margins": -0.00030706170946359634,
+ "rewards/rejected": 0.0020823883824050426,
+ "step": 50
+ },
+ {
+ "epoch": 0.041198180413698396,
+ "grad_norm": 40.02800677179251,
+ "learning_rate": 2.0205479452054795e-07,
+ "logits/chosen": 0.34263893961906433,
+ "logits/rejected": 0.381570965051651,
+ "logps/chosen": -192.94613647460938,
+ "logps/rejected": -191.47396850585938,
+ "loss": 0.6959,
+ "rewards/accuracies": 0.4906250238418579,
+ "rewards/chosen": -0.0020632040686905384,
+ "rewards/margins": -0.00374617800116539,
+ "rewards/rejected": 0.0016829746309667826,
+ "step": 60
+ },
+ {
+ "epoch": 0.048064543815981464,
+ "grad_norm": 44.64464771956862,
+ "learning_rate": 2.363013698630137e-07,
+ "logits/chosen": 0.367872029542923,
+ "logits/rejected": 0.38009101152420044,
+ "logps/chosen": -191.12225341796875,
+ "logps/rejected": -192.7696990966797,
+ "loss": 0.6928,
+ "rewards/accuracies": 0.49375003576278687,
+ "rewards/chosen": 0.00574580579996109,
+ "rewards/margins": 0.0022611478343605995,
+ "rewards/rejected": 0.0034846579656004906,
+ "step": 70
+ },
+ {
+ "epoch": 0.054930907218264526,
+ "grad_norm": 41.884882712897294,
+ "learning_rate": 2.7054794520547945e-07,
+ "logits/chosen": 0.3370077908039093,
+ "logits/rejected": 0.34699341654777527,
+ "logps/chosen": -188.7425537109375,
+ "logps/rejected": -190.76052856445312,
+ "loss": 0.6913,
+ "rewards/accuracies": 0.5218750238418579,
+ "rewards/chosen": 0.0027863490395247936,
+ "rewards/margins": 0.005139566957950592,
+ "rewards/rejected": -0.0023532179184257984,
+ "step": 80
+ },
+ {
+ "epoch": 0.061797270620547594,
+ "grad_norm": 44.4056603091038,
+ "learning_rate": 3.047945205479452e-07,
+ "logits/chosen": 0.3108683228492737,
+ "logits/rejected": 0.3609522581100464,
+ "logps/chosen": -195.35147094726562,
+ "logps/rejected": -193.3247833251953,
+ "loss": 0.6931,
+ "rewards/accuracies": 0.5218749642372131,
+ "rewards/chosen": 0.00044549518497660756,
+ "rewards/margins": 0.0018838731339201331,
+ "rewards/rejected": -0.0014383781235665083,
+ "step": 90
+ },
+ {
+ "epoch": 0.06866363402283066,
+ "grad_norm": 41.857792639398546,
+ "learning_rate": 3.39041095890411e-07,
+ "logits/chosen": 0.3763309121131897,
+ "logits/rejected": 0.3922409415245056,
+ "logps/chosen": -193.8089141845703,
+ "logps/rejected": -198.68551635742188,
+ "loss": 0.6899,
+ "rewards/accuracies": 0.5281250476837158,
+ "rewards/chosen": 0.005990107078105211,
+ "rewards/margins": 0.008157256990671158,
+ "rewards/rejected": -0.0021671501453965902,
+ "step": 100
+ },
+ {
+ "epoch": 0.07552999742511372,
+ "grad_norm": 41.62980958107549,
+ "learning_rate": 3.7328767123287667e-07,
+ "logits/chosen": 0.3591919541358948,
+ "logits/rejected": 0.39655131101608276,
+ "logps/chosen": -196.92422485351562,
+ "logps/rejected": -197.28872680664062,
+ "loss": 0.689,
+ "rewards/accuracies": 0.546875,
+ "rewards/chosen": 0.004954576026648283,
+ "rewards/margins": 0.010082172229886055,
+ "rewards/rejected": -0.005127596668899059,
+ "step": 110
+ },
+ {
+ "epoch": 0.08239636082739679,
+ "grad_norm": 43.693807356131735,
+ "learning_rate": 4.0753424657534246e-07,
+ "logits/chosen": 0.3348645865917206,
+ "logits/rejected": 0.35767242312431335,
+ "logps/chosen": -198.3641357421875,
+ "logps/rejected": -203.7303009033203,
+ "loss": 0.6885,
+ "rewards/accuracies": 0.5750000476837158,
+ "rewards/chosen": 0.005779502913355827,
+ "rewards/margins": 0.01143658347427845,
+ "rewards/rejected": -0.005657079629600048,
+ "step": 120
+ },
+ {
+ "epoch": 0.08926272422967986,
+ "grad_norm": 40.57954536185725,
+ "learning_rate": 4.417808219178082e-07,
+ "logits/chosen": 0.336916983127594,
+ "logits/rejected": 0.3446354866027832,
+ "logps/chosen": -193.88165283203125,
+ "logps/rejected": -191.94683837890625,
+ "loss": 0.6842,
+ "rewards/accuracies": 0.606249988079071,
+ "rewards/chosen": 0.010509985499083996,
+ "rewards/margins": 0.020326683297753334,
+ "rewards/rejected": -0.009816695004701614,
+ "step": 130
+ },
+ {
+ "epoch": 0.09612908763196293,
+ "grad_norm": 41.15941757414013,
+ "learning_rate": 4.7602739726027394e-07,
+ "logits/chosen": 0.35713282227516174,
+ "logits/rejected": 0.3554469347000122,
+ "logps/chosen": -192.0938720703125,
+ "logps/rejected": -192.8302001953125,
+ "loss": 0.686,
+ "rewards/accuracies": 0.5390625596046448,
+ "rewards/chosen": -0.0002686302177608013,
+ "rewards/margins": 0.017483647912740707,
+ "rewards/rejected": -0.017752276733517647,
+ "step": 140
+ },
+ {
+ "epoch": 0.10299545103424598,
+ "grad_norm": 42.74829745936997,
+ "learning_rate": 4.988558352402745e-07,
+ "logits/chosen": 0.3553549349308014,
+ "logits/rejected": 0.36016494035720825,
+ "logps/chosen": -189.06460571289062,
+ "logps/rejected": -189.97903442382812,
+ "loss": 0.6797,
+ "rewards/accuracies": 0.6140625476837158,
+ "rewards/chosen": 0.008752593770623207,
+ "rewards/margins": 0.030557716265320778,
+ "rewards/rejected": -0.02180512063205242,
+ "step": 150
+ },
+ {
+ "epoch": 0.10986181443652905,
+ "grad_norm": 35.871465251274834,
+ "learning_rate": 4.950419527078566e-07,
+ "logits/chosen": 0.43320149183273315,
+ "logits/rejected": 0.41380226612091064,
+ "logps/chosen": -181.6295623779297,
+ "logps/rejected": -186.4168701171875,
+ "loss": 0.6782,
+ "rewards/accuracies": 0.6109375357627869,
+ "rewards/chosen": 0.0026512315962463617,
+ "rewards/margins": 0.03445584699511528,
+ "rewards/rejected": -0.03180461376905441,
+ "step": 160
+ },
+ {
+ "epoch": 0.11672817783881212,
+ "grad_norm": 43.14214265722976,
+ "learning_rate": 4.912280701754385e-07,
+ "logits/chosen": 0.3183770477771759,
+ "logits/rejected": 0.3852064311504364,
+ "logps/chosen": -198.97998046875,
+ "logps/rejected": -195.97885131835938,
+ "loss": 0.6774,
+ "rewards/accuracies": 0.604687511920929,
+ "rewards/chosen": -0.0013574912445619702,
+ "rewards/margins": 0.03735988214612007,
+ "rewards/rejected": -0.038717374205589294,
+ "step": 170
+ },
+ {
+ "epoch": 0.12359454124109519,
+ "grad_norm": 41.435109809126814,
+ "learning_rate": 4.874141876430206e-07,
+ "logits/chosen": 0.3532235324382782,
+ "logits/rejected": 0.37155628204345703,
+ "logps/chosen": -190.45042419433594,
+ "logps/rejected": -189.18121337890625,
+ "loss": 0.67,
+ "rewards/accuracies": 0.6343750357627869,
+ "rewards/chosen": 0.0023425191175192595,
+ "rewards/margins": 0.05440489947795868,
+ "rewards/rejected": -0.05206237733364105,
+ "step": 180
+ },
+ {
+ "epoch": 0.13046090464337826,
+ "grad_norm": 39.447517017039274,
+ "learning_rate": 4.836003051106026e-07,
+ "logits/chosen": 0.347377210855484,
+ "logits/rejected": 0.3284303843975067,
+ "logps/chosen": -194.7428741455078,
+ "logps/rejected": -194.8145294189453,
+ "loss": 0.6723,
+ "rewards/accuracies": 0.573437511920929,
+ "rewards/chosen": 0.0014576537068933249,
+ "rewards/margins": 0.051343731582164764,
+ "rewards/rejected": -0.049886077642440796,
+ "step": 190
+ },
+ {
+ "epoch": 0.1373272680456613,
+ "grad_norm": 38.847764648617996,
+ "learning_rate": 4.797864225781846e-07,
+ "logits/chosen": 0.3240855932235718,
+ "logits/rejected": 0.35028141736984253,
+ "logps/chosen": -195.90789794921875,
+ "logps/rejected": -196.3568878173828,
+ "loss": 0.6638,
+ "rewards/accuracies": 0.6343750357627869,
+ "rewards/chosen": -0.016327721998095512,
+ "rewards/margins": 0.07191106677055359,
+ "rewards/rejected": -0.08823878318071365,
+ "step": 200
+ },
+ {
+ "epoch": 0.1441936314479444,
+ "grad_norm": 40.58275358830573,
+ "learning_rate": 4.759725400457666e-07,
+ "logits/chosen": 0.3534795641899109,
+ "logits/rejected": 0.4051111340522766,
+ "logps/chosen": -189.0966796875,
+ "logps/rejected": -194.59945678710938,
+ "loss": 0.666,
+ "rewards/accuracies": 0.6312500238418579,
+ "rewards/chosen": -0.030735965818166733,
+ "rewards/margins": 0.06700534373521805,
+ "rewards/rejected": -0.09774130582809448,
+ "step": 210
+ },
+ {
+ "epoch": 0.15105999485022745,
+ "grad_norm": 45.10759256069824,
+ "learning_rate": 4.7215865751334857e-07,
+ "logits/chosen": 0.3427739143371582,
+ "logits/rejected": 0.3563682734966278,
+ "logps/chosen": -194.303955078125,
+ "logps/rejected": -196.68199157714844,
+ "loss": 0.6633,
+ "rewards/accuracies": 0.6296875476837158,
+ "rewards/chosen": -0.029208209365606308,
+ "rewards/margins": 0.0735444575548172,
+ "rewards/rejected": -0.1027526706457138,
+ "step": 220
+ },
+ {
+ "epoch": 0.1579263582525105,
+ "grad_norm": 37.81921037823293,
+ "learning_rate": 4.6834477498093057e-07,
+ "logits/chosen": 0.3555576801300049,
+ "logits/rejected": 0.36491692066192627,
+ "logps/chosen": -188.83424377441406,
+ "logps/rejected": -192.81069946289062,
+ "loss": 0.6537,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": -0.04094560444355011,
+ "rewards/margins": 0.0983317419886589,
+ "rewards/rejected": -0.1392773538827896,
+ "step": 230
+ },
+ {
+ "epoch": 0.16479272165479358,
+ "grad_norm": 41.32206557637078,
+ "learning_rate": 4.6453089244851257e-07,
+ "logits/chosen": 0.39804917573928833,
+ "logits/rejected": 0.36356666684150696,
+ "logps/chosen": -194.4440155029297,
+ "logps/rejected": -200.01412963867188,
+ "loss": 0.6514,
+ "rewards/accuracies": 0.6343750357627869,
+ "rewards/chosen": -0.04057128727436066,
+ "rewards/margins": 0.10563759505748749,
+ "rewards/rejected": -0.14620888233184814,
+ "step": 240
+ },
+ {
+ "epoch": 0.17165908505707664,
+ "grad_norm": 39.276473772360774,
+ "learning_rate": 4.607170099160946e-07,
+ "logits/chosen": 0.33169448375701904,
+ "logits/rejected": 0.3752690553665161,
+ "logps/chosen": -196.81478881835938,
+ "logps/rejected": -201.34481811523438,
+ "loss": 0.6452,
+ "rewards/accuracies": 0.6781250238418579,
+ "rewards/chosen": -0.03887462615966797,
+ "rewards/margins": 0.12037336826324463,
+ "rewards/rejected": -0.1592479944229126,
+ "step": 250
+ },
+ {
+ "epoch": 0.17852544845935972,
+ "grad_norm": 47.0610078724711,
+ "learning_rate": 4.569031273836766e-07,
+ "logits/chosen": 0.34487199783325195,
+ "logits/rejected": 0.3575427532196045,
+ "logps/chosen": -193.79837036132812,
+ "logps/rejected": -197.2197265625,
+ "loss": 0.6508,
+ "rewards/accuracies": 0.6421875357627869,
+ "rewards/chosen": -0.09015019237995148,
+ "rewards/margins": 0.11270132660865784,
+ "rewards/rejected": -0.20285151898860931,
+ "step": 260
+ },
+ {
+ "epoch": 0.18539181186164277,
+ "grad_norm": 38.626969264563606,
+ "learning_rate": 4.5308924485125853e-07,
+ "logits/chosen": 0.32855209708213806,
+ "logits/rejected": 0.3907463252544403,
+ "logps/chosen": -191.17459106445312,
+ "logps/rejected": -189.3603057861328,
+ "loss": 0.6475,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": -0.07459260523319244,
+ "rewards/margins": 0.11734659224748611,
+ "rewards/rejected": -0.19193920493125916,
+ "step": 270
+ },
+ {
+ "epoch": 0.19225817526392586,
+ "grad_norm": 40.103707367823155,
+ "learning_rate": 4.4927536231884053e-07,
+ "logits/chosen": 0.3284029960632324,
+ "logits/rejected": 0.3626914918422699,
+ "logps/chosen": -192.84405517578125,
+ "logps/rejected": -194.70567321777344,
+ "loss": 0.6457,
+ "rewards/accuracies": 0.6656250357627869,
+ "rewards/chosen": -0.07513514161109924,
+ "rewards/margins": 0.12614111602306366,
+ "rewards/rejected": -0.2012762576341629,
+ "step": 280
+ },
+ {
+ "epoch": 0.1991245386662089,
+ "grad_norm": 40.1951811593668,
+ "learning_rate": 4.454614797864226e-07,
+ "logits/chosen": 0.34982651472091675,
+ "logits/rejected": 0.3777983486652374,
+ "logps/chosen": -194.1185760498047,
+ "logps/rejected": -201.16836547851562,
+ "loss": 0.6373,
+ "rewards/accuracies": 0.6484375596046448,
+ "rewards/chosen": -0.062012314796447754,
+ "rewards/margins": 0.14990699291229248,
+ "rewards/rejected": -0.21191930770874023,
+ "step": 290
+ },
+ {
+ "epoch": 0.20599090206849197,
+ "grad_norm": 40.613600115901605,
+ "learning_rate": 4.416475972540046e-07,
+ "logits/chosen": 0.30782297253608704,
+ "logits/rejected": 0.3243838846683502,
+ "logps/chosen": -201.00067138671875,
+ "logps/rejected": -197.9466552734375,
+ "loss": 0.629,
+ "rewards/accuracies": 0.703125,
+ "rewards/chosen": -0.02788611315190792,
+ "rewards/margins": 0.17499074339866638,
+ "rewards/rejected": -0.20287683606147766,
+ "step": 300
+ },
+ {
+ "epoch": 0.21285726547077505,
+ "grad_norm": 38.45809132820782,
+ "learning_rate": 4.3783371472158654e-07,
+ "logits/chosen": 0.293393611907959,
+ "logits/rejected": 0.34445005655288696,
+ "logps/chosen": -190.18313598632812,
+ "logps/rejected": -192.75411987304688,
+ "loss": 0.6295,
+ "rewards/accuracies": 0.6859375238418579,
+ "rewards/chosen": -0.031520403921604156,
+ "rewards/margins": 0.16698937118053436,
+ "rewards/rejected": -0.19850978255271912,
+ "step": 310
+ },
+ {
+ "epoch": 0.2197236288730581,
+ "grad_norm": 38.34602578772718,
+ "learning_rate": 4.3401983218916855e-07,
+ "logits/chosen": 0.2906053066253662,
+ "logits/rejected": 0.34385767579078674,
+ "logps/chosen": -194.83795166015625,
+ "logps/rejected": -195.7698211669922,
+ "loss": 0.6173,
+ "rewards/accuracies": 0.671875,
+ "rewards/chosen": -0.01768593303859234,
+ "rewards/margins": 0.21204118430614471,
+ "rewards/rejected": -0.2297271192073822,
+ "step": 320
+ },
+ {
+ "epoch": 0.22658999227534118,
+ "grad_norm": 42.74391582277574,
+ "learning_rate": 4.3020594965675055e-07,
+ "logits/chosen": 0.30266571044921875,
+ "logits/rejected": 0.3389095067977905,
+ "logps/chosen": -198.25233459472656,
+ "logps/rejected": -200.7266082763672,
+ "loss": 0.6263,
+ "rewards/accuracies": 0.6734375357627869,
+ "rewards/chosen": -0.0333101749420166,
+ "rewards/margins": 0.19498664140701294,
+ "rewards/rejected": -0.22829681634902954,
+ "step": 330
+ },
+ {
+ "epoch": 0.23345635567762424,
+ "grad_norm": 43.615077922449544,
+ "learning_rate": 4.2639206712433255e-07,
+ "logits/chosen": 0.30118876695632935,
+ "logits/rejected": 0.34083983302116394,
+ "logps/chosen": -196.76663208007812,
+ "logps/rejected": -196.46299743652344,
+ "loss": 0.6296,
+ "rewards/accuracies": 0.6640625,
+ "rewards/chosen": -0.011256666854023933,
+ "rewards/margins": 0.1830688714981079,
+ "rewards/rejected": -0.1943255364894867,
+ "step": 340
+ },
+ {
+ "epoch": 0.2403227190799073,
+ "grad_norm": 37.24751529917733,
+ "learning_rate": 4.2257818459191456e-07,
+ "logits/chosen": 0.25924569368362427,
+ "logits/rejected": 0.283855676651001,
+ "logps/chosen": -183.45191955566406,
+ "logps/rejected": -187.04440307617188,
+ "loss": 0.633,
+ "rewards/accuracies": 0.6625000238418579,
+ "rewards/chosen": -0.015273300930857658,
+ "rewards/margins": 0.17985355854034424,
+ "rewards/rejected": -0.19512686133384705,
+ "step": 350
+ },
+ {
+ "epoch": 0.24718908248219038,
+ "grad_norm": 40.58954818497333,
+ "learning_rate": 4.187643020594965e-07,
+ "logits/chosen": 0.28448960185050964,
+ "logits/rejected": 0.33332377672195435,
+ "logps/chosen": -193.56716918945312,
+ "logps/rejected": -197.53184509277344,
+ "loss": 0.6223,
+ "rewards/accuracies": 0.690625011920929,
+ "rewards/chosen": -0.048774123191833496,
+ "rewards/margins": 0.20520111918449402,
+ "rewards/rejected": -0.2539752423763275,
+ "step": 360
+ },
+ {
+ "epoch": 0.25405544588447343,
+ "grad_norm": 36.17378532660478,
+ "learning_rate": 4.1495041952707856e-07,
+ "logits/chosen": 0.2762497067451477,
+ "logits/rejected": 0.2907570004463196,
+ "logps/chosen": -188.5650634765625,
+ "logps/rejected": -190.88734436035156,
+ "loss": 0.6137,
+ "rewards/accuracies": 0.6890624761581421,
+ "rewards/chosen": -0.05067811906337738,
+ "rewards/margins": 0.2351914346218109,
+ "rewards/rejected": -0.2858695387840271,
+ "step": 370
+ },
+ {
+ "epoch": 0.2609218092867565,
+ "grad_norm": 37.43784314884923,
+ "learning_rate": 4.1113653699466057e-07,
+ "logits/chosen": 0.31868118047714233,
+ "logits/rejected": 0.3551029562950134,
+ "logps/chosen": -194.93936157226562,
+ "logps/rejected": -198.65814208984375,
+ "loss": 0.6003,
+ "rewards/accuracies": 0.7046874761581421,
+ "rewards/chosen": -0.0772961899638176,
+ "rewards/margins": 0.2751655578613281,
+ "rewards/rejected": -0.35246172547340393,
+ "step": 380
+ },
+ {
+ "epoch": 0.2677881726890396,
+ "grad_norm": 35.91361143227307,
+ "learning_rate": 4.0732265446224257e-07,
+ "logits/chosen": 0.3169372081756592,
+ "logits/rejected": 0.36260250210762024,
+ "logps/chosen": -201.2320556640625,
+ "logps/rejected": -202.763427734375,
+ "loss": 0.613,
+ "rewards/accuracies": 0.6781250238418579,
+ "rewards/chosen": -0.09429500252008438,
+ "rewards/margins": 0.24527618288993835,
+ "rewards/rejected": -0.3395712077617645,
+ "step": 390
+ },
+ {
+ "epoch": 0.2746545360913226,
+ "grad_norm": 42.53335069773125,
+ "learning_rate": 4.035087719298245e-07,
+ "logits/chosen": 0.31207048892974854,
+ "logits/rejected": 0.3457629680633545,
+ "logps/chosen": -206.29269409179688,
+ "logps/rejected": -201.12423706054688,
+ "loss": 0.601,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.08769121766090393,
+ "rewards/margins": 0.2706546187400818,
+ "rewards/rejected": -0.3583458364009857,
+ "step": 400
+ },
+ {
+ "epoch": 0.2815208994936057,
+ "grad_norm": 41.29813511944604,
+ "learning_rate": 3.996948893974065e-07,
+ "logits/chosen": 0.2636485993862152,
+ "logits/rejected": 0.31149038672447205,
+ "logps/chosen": -194.1279296875,
+ "logps/rejected": -194.5345001220703,
+ "loss": 0.5947,
+ "rewards/accuracies": 0.703125,
+ "rewards/chosen": -0.05628068745136261,
+ "rewards/margins": 0.2829420864582062,
+ "rewards/rejected": -0.33922278881073,
+ "step": 410
+ },
+ {
+ "epoch": 0.2883872628958888,
+ "grad_norm": 37.77093581368485,
+ "learning_rate": 3.9588100686498853e-07,
+ "logits/chosen": 0.31741800904273987,
+ "logits/rejected": 0.32371169328689575,
+ "logps/chosen": -190.85951232910156,
+ "logps/rejected": -198.15396118164062,
+ "loss": 0.609,
+ "rewards/accuracies": 0.6843750476837158,
+ "rewards/chosen": -0.12272913008928299,
+ "rewards/margins": 0.2659768760204315,
+ "rewards/rejected": -0.3887060284614563,
+ "step": 420
+ },
+ {
+ "epoch": 0.2952536262981718,
+ "grad_norm": 42.18074374760277,
+ "learning_rate": 3.9206712433257053e-07,
+ "logits/chosen": 0.2479156255722046,
+ "logits/rejected": 0.2833534777164459,
+ "logps/chosen": -195.24853515625,
+ "logps/rejected": -199.1217498779297,
+ "loss": 0.6196,
+ "rewards/accuracies": 0.671875,
+ "rewards/chosen": -0.09873144328594208,
+ "rewards/margins": 0.23723819851875305,
+ "rewards/rejected": -0.3359696567058563,
+ "step": 430
+ },
+ {
+ "epoch": 0.3021199897004549,
+ "grad_norm": 40.84692701651787,
+ "learning_rate": 3.882532418001526e-07,
+ "logits/chosen": 0.26803913712501526,
+ "logits/rejected": 0.2534467577934265,
+ "logps/chosen": -198.0133056640625,
+ "logps/rejected": -203.25198364257812,
+ "loss": 0.5946,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -0.07623399049043655,
+ "rewards/margins": 0.3022476136684418,
+ "rewards/rejected": -0.3784816265106201,
+ "step": 440
+ },
+ {
+ "epoch": 0.308986353102738,
+ "grad_norm": 52.48738116894515,
+ "learning_rate": 3.8443935926773454e-07,
+ "logits/chosen": 0.2687249779701233,
+ "logits/rejected": 0.2839760184288025,
+ "logps/chosen": -190.901611328125,
+ "logps/rejected": -197.57106018066406,
+ "loss": 0.6148,
+ "rewards/accuracies": 0.684374988079071,
+ "rewards/chosen": -0.1082596629858017,
+ "rewards/margins": 0.2658810019493103,
+ "rewards/rejected": -0.3741406798362732,
+ "step": 450
+ },
+ {
+ "epoch": 0.315852716505021,
+ "grad_norm": 41.567018649209494,
+ "learning_rate": 3.8062547673531654e-07,
+ "logits/chosen": 0.2616228461265564,
+ "logits/rejected": 0.31667307019233704,
+ "logps/chosen": -190.47607421875,
+ "logps/rejected": -192.07882690429688,
+ "loss": 0.5991,
+ "rewards/accuracies": 0.682812511920929,
+ "rewards/chosen": -0.09240880608558655,
+ "rewards/margins": 0.2821645140647888,
+ "rewards/rejected": -0.37457332015037537,
+ "step": 460
+ },
+ {
+ "epoch": 0.3227190799073041,
+ "grad_norm": 35.499115046921446,
+ "learning_rate": 3.7681159420289855e-07,
+ "logits/chosen": 0.27905237674713135,
+ "logits/rejected": 0.31074196100234985,
+ "logps/chosen": -192.92088317871094,
+ "logps/rejected": -197.79124450683594,
+ "loss": 0.61,
+ "rewards/accuracies": 0.6734375357627869,
+ "rewards/chosen": -0.13386519253253937,
+ "rewards/margins": 0.27014487981796265,
+ "rewards/rejected": -0.4040100872516632,
+ "step": 470
+ },
+ {
+ "epoch": 0.32958544330958717,
+ "grad_norm": 41.86761698270693,
+ "learning_rate": 3.7299771167048055e-07,
+ "logits/chosen": 0.26603832840919495,
+ "logits/rejected": 0.26051998138427734,
+ "logps/chosen": -190.23660278320312,
+ "logps/rejected": -197.73736572265625,
+ "loss": 0.5981,
+ "rewards/accuracies": 0.6875000596046448,
+ "rewards/chosen": -0.1415073573589325,
+ "rewards/margins": 0.31012141704559326,
+ "rewards/rejected": -0.45162874460220337,
+ "step": 480
+ },
+ {
+ "epoch": 0.33645180671187025,
+ "grad_norm": 37.648693672476206,
+ "learning_rate": 3.691838291380625e-07,
+ "logits/chosen": 0.3558499813079834,
+ "logits/rejected": 0.3315980136394501,
+ "logps/chosen": -188.3795623779297,
+ "logps/rejected": -200.14048767089844,
+ "loss": 0.5837,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.1745017170906067,
+ "rewards/margins": 0.364481657743454,
+ "rewards/rejected": -0.5389833450317383,
+ "step": 490
+ },
+ {
+ "epoch": 0.3433181701141533,
+ "grad_norm": 40.65037054074536,
+ "learning_rate": 3.653699466056445e-07,
+ "logits/chosen": 0.2540973722934723,
+ "logits/rejected": 0.2895161509513855,
+ "logps/chosen": -187.24462890625,
+ "logps/rejected": -193.23289489746094,
+ "loss": 0.5909,
+ "rewards/accuracies": 0.703125,
+ "rewards/chosen": -0.1618882715702057,
+ "rewards/margins": 0.3192650377750397,
+ "rewards/rejected": -0.48115330934524536,
+ "step": 500
+ },
+ {
+ "epoch": 0.35018453351643636,
+ "grad_norm": 36.872109535629086,
+ "learning_rate": 3.615560640732265e-07,
+ "logits/chosen": 0.31113675236701965,
+ "logits/rejected": 0.3199848532676697,
+ "logps/chosen": -191.7982177734375,
+ "logps/rejected": -196.9516143798828,
+ "loss": 0.584,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.18086068332195282,
+ "rewards/margins": 0.3591785430908203,
+ "rewards/rejected": -0.5400392413139343,
+ "step": 510
+ },
+ {
+ "epoch": 0.35705089691871944,
+ "grad_norm": 37.857597108335746,
+ "learning_rate": 3.5774218154080856e-07,
+ "logits/chosen": 0.27571016550064087,
+ "logits/rejected": 0.33552512526512146,
+ "logps/chosen": -199.4322509765625,
+ "logps/rejected": -199.8485870361328,
+ "loss": 0.5673,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.16350007057189941,
+ "rewards/margins": 0.4010578989982605,
+ "rewards/rejected": -0.5645579695701599,
+ "step": 520
+ },
+ {
+ "epoch": 0.36391726032100247,
+ "grad_norm": 37.54411288303357,
+ "learning_rate": 3.5392829900839057e-07,
+ "logits/chosen": 0.2737811803817749,
+ "logits/rejected": 0.30892500281333923,
+ "logps/chosen": -187.1866912841797,
+ "logps/rejected": -191.27017211914062,
+ "loss": 0.5954,
+ "rewards/accuracies": 0.684374988079071,
+ "rewards/chosen": -0.1845116764307022,
+ "rewards/margins": 0.33810287714004517,
+ "rewards/rejected": -0.5226145386695862,
+ "step": 530
+ },
+ {
+ "epoch": 0.37078362372328555,
+ "grad_norm": 36.379608658695076,
+ "learning_rate": 3.501144164759725e-07,
+ "logits/chosen": 0.3017553389072418,
+ "logits/rejected": 0.33447474241256714,
+ "logps/chosen": -186.4514923095703,
+ "logps/rejected": -192.16998291015625,
+ "loss": 0.5762,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.15427324175834656,
+ "rewards/margins": 0.3679887056350708,
+ "rewards/rejected": -0.5222619771957397,
+ "step": 540
+ },
+ {
+ "epoch": 0.37764998712556863,
+ "grad_norm": 34.175660697861545,
+ "learning_rate": 3.463005339435545e-07,
+ "logits/chosen": 0.27010437846183777,
+ "logits/rejected": 0.2763593792915344,
+ "logps/chosen": -195.02452087402344,
+ "logps/rejected": -202.46990966796875,
+ "loss": 0.563,
+ "rewards/accuracies": 0.7484375238418579,
+ "rewards/chosen": -0.20031140744686127,
+ "rewards/margins": 0.4455968141555786,
+ "rewards/rejected": -0.6459081768989563,
+ "step": 550
+ },
+ {
+ "epoch": 0.3845163505278517,
+ "grad_norm": 42.17768587566113,
+ "learning_rate": 3.424866514111365e-07,
+ "logits/chosen": 0.2938051223754883,
+ "logits/rejected": 0.265284925699234,
+ "logps/chosen": -197.04212951660156,
+ "logps/rejected": -205.32717895507812,
+ "loss": 0.5741,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -0.20310233533382416,
+ "rewards/margins": 0.4046763479709625,
+ "rewards/rejected": -0.6077786684036255,
+ "step": 560
+ },
+ {
+ "epoch": 0.39138271393013474,
+ "grad_norm": 39.556354214822186,
+ "learning_rate": 3.3867276887871853e-07,
+ "logits/chosen": 0.31264281272888184,
+ "logits/rejected": 0.3123646676540375,
+ "logps/chosen": -189.451416015625,
+ "logps/rejected": -197.66497802734375,
+ "loss": 0.562,
+ "rewards/accuracies": 0.714062511920929,
+ "rewards/chosen": -0.17061220109462738,
+ "rewards/margins": 0.4390391707420349,
+ "rewards/rejected": -0.6096513867378235,
+ "step": 570
+ },
+ {
+ "epoch": 0.3982490773324178,
+ "grad_norm": 33.057101262111715,
+ "learning_rate": 3.348588863463005e-07,
+ "logits/chosen": 0.28659966588020325,
+ "logits/rejected": 0.3126603662967682,
+ "logps/chosen": -185.3994903564453,
+ "logps/rejected": -191.210693359375,
+ "loss": 0.5746,
+ "rewards/accuracies": 0.7046874761581421,
+ "rewards/chosen": -0.20723019540309906,
+ "rewards/margins": 0.42978933453559875,
+ "rewards/rejected": -0.6370195150375366,
+ "step": 580
+ },
+ {
+ "epoch": 0.4051154407347009,
+ "grad_norm": 40.67219189059665,
+ "learning_rate": 3.310450038138825e-07,
+ "logits/chosen": 0.34570857882499695,
+ "logits/rejected": 0.36093324422836304,
+ "logps/chosen": -191.6331787109375,
+ "logps/rejected": -195.85353088378906,
+ "loss": 0.574,
+ "rewards/accuracies": 0.7124999761581421,
+ "rewards/chosen": -0.24881573021411896,
+ "rewards/margins": 0.40310919284820557,
+ "rewards/rejected": -0.6519248485565186,
+ "step": 590
+ },
+ {
+ "epoch": 0.41198180413698393,
+ "grad_norm": 32.27412195705655,
+ "learning_rate": 3.2723112128146454e-07,
+ "logits/chosen": 0.28392359614372253,
+ "logits/rejected": 0.3210703730583191,
+ "logps/chosen": -193.51974487304688,
+ "logps/rejected": -204.85496520996094,
+ "loss": 0.5756,
+ "rewards/accuracies": 0.7046875357627869,
+ "rewards/chosen": -0.27069714665412903,
+ "rewards/margins": 0.39500606060028076,
+ "rewards/rejected": -0.6657031774520874,
+ "step": 600
+ },
+ {
+ "epoch": 0.418848167539267,
+ "grad_norm": 40.11541316987076,
+ "learning_rate": 3.2341723874904654e-07,
+ "logits/chosen": 0.2726914882659912,
+ "logits/rejected": 0.3119577467441559,
+ "logps/chosen": -199.31056213378906,
+ "logps/rejected": -204.21376037597656,
+ "loss": 0.5566,
+ "rewards/accuracies": 0.7421875,
+ "rewards/chosen": -0.2292744219303131,
+ "rewards/margins": 0.47578680515289307,
+ "rewards/rejected": -0.7050611972808838,
+ "step": 610
+ },
+ {
+ "epoch": 0.4257145309415501,
+ "grad_norm": 38.57524526832705,
+ "learning_rate": 3.1960335621662854e-07,
+ "logits/chosen": 0.3519170880317688,
+ "logits/rejected": 0.32691216468811035,
+ "logps/chosen": -185.44210815429688,
+ "logps/rejected": -193.6503448486328,
+ "loss": 0.5954,
+ "rewards/accuracies": 0.667187511920929,
+ "rewards/chosen": -0.2960931956768036,
+ "rewards/margins": 0.3511514365673065,
+ "rewards/rejected": -0.6472446322441101,
+ "step": 620
+ },
+ {
+ "epoch": 0.4325808943438331,
+ "grad_norm": 37.69383619234179,
+ "learning_rate": 3.157894736842105e-07,
+ "logits/chosen": 0.29799604415893555,
+ "logits/rejected": 0.34074339270591736,
+ "logps/chosen": -195.78738403320312,
+ "logps/rejected": -199.2224578857422,
+ "loss": 0.5581,
+ "rewards/accuracies": 0.7390625476837158,
+ "rewards/chosen": -0.25615453720092773,
+ "rewards/margins": 0.450039267539978,
+ "rewards/rejected": -0.7061938047409058,
+ "step": 630
+ },
+ {
+ "epoch": 0.4394472577461162,
+ "grad_norm": 38.189321055235546,
+ "learning_rate": 3.119755911517925e-07,
+ "logits/chosen": 0.2873116135597229,
+ "logits/rejected": 0.3004646897315979,
+ "logps/chosen": -189.26136779785156,
+ "logps/rejected": -196.96090698242188,
+ "loss": 0.547,
+ "rewards/accuracies": 0.75,
+ "rewards/chosen": -0.2134401500225067,
+ "rewards/margins": 0.49057334661483765,
+ "rewards/rejected": -0.704013466835022,
+ "step": 640
+ },
+ {
+ "epoch": 0.4463136211483993,
+ "grad_norm": 36.10179194685923,
+ "learning_rate": 3.081617086193745e-07,
+ "logits/chosen": 0.2833235263824463,
+ "logits/rejected": 0.29193115234375,
+ "logps/chosen": -192.39608764648438,
+ "logps/rejected": -196.04393005371094,
+ "loss": 0.5785,
+ "rewards/accuracies": 0.7093749642372131,
+ "rewards/chosen": -0.27462145686149597,
+ "rewards/margins": 0.42194679379463196,
+ "rewards/rejected": -0.6965682506561279,
+ "step": 650
+ },
+ {
+ "epoch": 0.45317998455068237,
+ "grad_norm": 40.70569476126112,
+ "learning_rate": 3.043478260869565e-07,
+ "logits/chosen": 0.2833031415939331,
+ "logits/rejected": 0.3039618134498596,
+ "logps/chosen": -194.68643188476562,
+ "logps/rejected": -201.3513641357422,
+ "loss": 0.5592,
+ "rewards/accuracies": 0.7406249642372131,
+ "rewards/chosen": -0.24034196138381958,
+ "rewards/margins": 0.47991621494293213,
+ "rewards/rejected": -0.7202582359313965,
+ "step": 660
+ },
+ {
+ "epoch": 0.4600463479529654,
+ "grad_norm": 39.47730074592621,
+ "learning_rate": 3.005339435545385e-07,
+ "logits/chosen": 0.24101117253303528,
+ "logits/rejected": 0.28705087304115295,
+ "logps/chosen": -193.5571746826172,
+ "logps/rejected": -200.22122192382812,
+ "loss": 0.5636,
+ "rewards/accuracies": 0.7359375357627869,
+ "rewards/chosen": -0.2813783586025238,
+ "rewards/margins": 0.4671448767185211,
+ "rewards/rejected": -0.7485232353210449,
+ "step": 670
+ },
+ {
+ "epoch": 0.4669127113552485,
+ "grad_norm": 39.65241691610216,
+ "learning_rate": 2.967200610221205e-07,
+ "logits/chosen": 0.3657754063606262,
+ "logits/rejected": 0.37657299637794495,
+ "logps/chosen": -191.58316040039062,
+ "logps/rejected": -199.84127807617188,
+ "loss": 0.5671,
+ "rewards/accuracies": 0.7421875,
+ "rewards/chosen": -0.3193482756614685,
+ "rewards/margins": 0.4501326382160187,
+ "rewards/rejected": -0.7694809436798096,
+ "step": 680
+ },
+ {
+ "epoch": 0.47377907475753156,
+ "grad_norm": 38.57964300760157,
+ "learning_rate": 2.929061784897025e-07,
+ "logits/chosen": 0.23921574652194977,
+ "logits/rejected": 0.35300254821777344,
+ "logps/chosen": -201.5934295654297,
+ "logps/rejected": -198.59539794921875,
+ "loss": 0.5621,
+ "rewards/accuracies": 0.7234375476837158,
+ "rewards/chosen": -0.24311897158622742,
+ "rewards/margins": 0.4885551631450653,
+ "rewards/rejected": -0.7316741347312927,
+ "step": 690
+ },
+ {
+ "epoch": 0.4806454381598146,
+ "grad_norm": 37.6708105251052,
+ "learning_rate": 2.890922959572845e-07,
+ "logits/chosen": 0.35867607593536377,
+ "logits/rejected": 0.3570886254310608,
+ "logps/chosen": -196.69223022460938,
+ "logps/rejected": -203.36892700195312,
+ "loss": 0.5599,
+ "rewards/accuracies": 0.703125,
+ "rewards/chosen": -0.259895920753479,
+ "rewards/margins": 0.5065343379974365,
+ "rewards/rejected": -0.7664302587509155,
+ "step": 700
+ },
+ {
+ "epoch": 0.48751180156209767,
+ "grad_norm": 33.960668217462846,
+ "learning_rate": 2.852784134248665e-07,
+ "logits/chosen": 0.3202595114707947,
+ "logits/rejected": 0.34629160165786743,
+ "logps/chosen": -183.36761474609375,
+ "logps/rejected": -190.17095947265625,
+ "loss": 0.5553,
+ "rewards/accuracies": 0.7093750238418579,
+ "rewards/chosen": -0.29635459184646606,
+ "rewards/margins": 0.4687589406967163,
+ "rewards/rejected": -0.7651135921478271,
+ "step": 710
+ },
+ {
+ "epoch": 0.49437816496438075,
+ "grad_norm": 41.77915341763896,
+ "learning_rate": 2.8146453089244847e-07,
+ "logits/chosen": 0.3262189030647278,
+ "logits/rejected": 0.37001171708106995,
+ "logps/chosen": -197.56304931640625,
+ "logps/rejected": -208.429443359375,
+ "loss": 0.5597,
+ "rewards/accuracies": 0.7156250476837158,
+ "rewards/chosen": -0.3544178307056427,
+ "rewards/margins": 0.4729451835155487,
+ "rewards/rejected": -0.8273630142211914,
+ "step": 720
+ },
+ {
+ "epoch": 0.5012445283666638,
+ "grad_norm": 40.82775700536951,
+ "learning_rate": 2.776506483600305e-07,
+ "logits/chosen": 0.28899723291397095,
+ "logits/rejected": 0.35643595457077026,
+ "logps/chosen": -201.49356079101562,
+ "logps/rejected": -203.23558044433594,
+ "loss": 0.5693,
+ "rewards/accuracies": 0.7124999761581421,
+ "rewards/chosen": -0.3458954691886902,
+ "rewards/margins": 0.4600406587123871,
+ "rewards/rejected": -0.8059360980987549,
+ "step": 730
+ },
+ {
+ "epoch": 0.5081108917689469,
+ "grad_norm": 33.71404588891465,
+ "learning_rate": 2.738367658276125e-07,
+ "logits/chosen": 0.2703500986099243,
+ "logits/rejected": 0.3099355697631836,
+ "logps/chosen": -193.55145263671875,
+ "logps/rejected": -199.3096923828125,
+ "loss": 0.5536,
+ "rewards/accuracies": 0.7328125238418579,
+ "rewards/chosen": -0.26327142119407654,
+ "rewards/margins": 0.5031423568725586,
+ "rewards/rejected": -0.766413688659668,
+ "step": 740
+ },
+ {
+ "epoch": 0.5149772551712299,
+ "grad_norm": 33.075818317425,
+ "learning_rate": 2.7002288329519454e-07,
+ "logits/chosen": 0.2545013427734375,
+ "logits/rejected": 0.3025552034378052,
+ "logps/chosen": -193.26318359375,
+ "logps/rejected": -200.33660888671875,
+ "loss": 0.5538,
+ "rewards/accuracies": 0.7281250357627869,
+ "rewards/chosen": -0.27697205543518066,
+ "rewards/margins": 0.4808192849159241,
+ "rewards/rejected": -0.75779128074646,
+ "step": 750
+ },
+ {
+ "epoch": 0.521843618573513,
+ "grad_norm": 40.25992379877124,
+ "learning_rate": 2.662090007627765e-07,
+ "logits/chosen": 0.3036815822124481,
+ "logits/rejected": 0.3631848394870758,
+ "logps/chosen": -198.0908203125,
+ "logps/rejected": -206.98089599609375,
+ "loss": 0.5472,
+ "rewards/accuracies": 0.7265625,
+ "rewards/chosen": -0.2982957065105438,
+ "rewards/margins": 0.5238938927650452,
+ "rewards/rejected": -0.8221895694732666,
+ "step": 760
+ },
+ {
+ "epoch": 0.5287099819757961,
+ "grad_norm": 37.9070748761064,
+ "learning_rate": 2.623951182303585e-07,
+ "logits/chosen": 0.24659216403961182,
+ "logits/rejected": 0.3046765625476837,
+ "logps/chosen": -201.07363891601562,
+ "logps/rejected": -207.00537109375,
+ "loss": 0.5449,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -0.26615357398986816,
+ "rewards/margins": 0.5252472758293152,
+ "rewards/rejected": -0.7914008498191833,
+ "step": 770
+ },
+ {
+ "epoch": 0.5355763453780792,
+ "grad_norm": 37.80873404676217,
+ "learning_rate": 2.585812356979405e-07,
+ "logits/chosen": 0.25920283794403076,
+ "logits/rejected": 0.2916741371154785,
+ "logps/chosen": -185.62496948242188,
+ "logps/rejected": -192.09083557128906,
+ "loss": 0.5401,
+ "rewards/accuracies": 0.7203124761581421,
+ "rewards/chosen": -0.26164376735687256,
+ "rewards/margins": 0.5513469576835632,
+ "rewards/rejected": -0.8129907846450806,
+ "step": 780
+ },
+ {
+ "epoch": 0.5424427087803622,
+ "grad_norm": 42.53158233624783,
+ "learning_rate": 2.547673531655225e-07,
+ "logits/chosen": 0.2548454701900482,
+ "logits/rejected": 0.328127920627594,
+ "logps/chosen": -195.73593139648438,
+ "logps/rejected": -201.92825317382812,
+ "loss": 0.5675,
+ "rewards/accuracies": 0.7015624642372131,
+ "rewards/chosen": -0.33536800742149353,
+ "rewards/margins": 0.4790363907814026,
+ "rewards/rejected": -0.8144044280052185,
+ "step": 790
+ },
+ {
+ "epoch": 0.5493090721826452,
+ "grad_norm": 36.58925507564145,
+ "learning_rate": 2.509534706331045e-07,
+ "logits/chosen": 0.2698169946670532,
+ "logits/rejected": 0.3065064549446106,
+ "logps/chosen": -202.49847412109375,
+ "logps/rejected": -207.4984130859375,
+ "loss": 0.5502,
+ "rewards/accuracies": 0.7374999523162842,
+ "rewards/chosen": -0.30143699049949646,
+ "rewards/margins": 0.5463117957115173,
+ "rewards/rejected": -0.8477488160133362,
+ "step": 800
+ },
+ {
+ "epoch": 0.5561754355849283,
+ "grad_norm": 33.42536910745375,
+ "learning_rate": 2.471395881006865e-07,
+ "logits/chosen": 0.3269241154193878,
+ "logits/rejected": 0.33714747428894043,
+ "logps/chosen": -195.79061889648438,
+ "logps/rejected": -204.3171844482422,
+ "loss": 0.541,
+ "rewards/accuracies": 0.7515624761581421,
+ "rewards/chosen": -0.2959471344947815,
+ "rewards/margins": 0.563454270362854,
+ "rewards/rejected": -0.8594014644622803,
+ "step": 810
+ },
+ {
+ "epoch": 0.5630417989872114,
+ "grad_norm": 38.40905751177336,
+ "learning_rate": 2.4332570556826846e-07,
+ "logits/chosen": 0.2773634195327759,
+ "logits/rejected": 0.3108067512512207,
+ "logps/chosen": -198.3325958251953,
+ "logps/rejected": -204.81982421875,
+ "loss": 0.5414,
+ "rewards/accuracies": 0.7390625476837158,
+ "rewards/chosen": -0.33161288499832153,
+ "rewards/margins": 0.5616108179092407,
+ "rewards/rejected": -0.8932236433029175,
+ "step": 820
+ },
+ {
+ "epoch": 0.5699081623894945,
+ "grad_norm": 43.36025952480035,
+ "learning_rate": 2.395118230358505e-07,
+ "logits/chosen": 0.3091006875038147,
+ "logits/rejected": 0.3100830018520355,
+ "logps/chosen": -194.6656036376953,
+ "logps/rejected": -204.10427856445312,
+ "loss": 0.5367,
+ "rewards/accuracies": 0.7578125,
+ "rewards/chosen": -0.3439427614212036,
+ "rewards/margins": 0.5804582238197327,
+ "rewards/rejected": -0.9244009256362915,
+ "step": 830
+ },
+ {
+ "epoch": 0.5767745257917776,
+ "grad_norm": 44.03658723941333,
+ "learning_rate": 2.3569794050343246e-07,
+ "logits/chosen": 0.23637430369853973,
+ "logits/rejected": 0.3134675920009613,
+ "logps/chosen": -194.6787109375,
+ "logps/rejected": -200.1925811767578,
+ "loss": 0.5326,
+ "rewards/accuracies": 0.7593749761581421,
+ "rewards/chosen": -0.30673253536224365,
+ "rewards/margins": 0.5752936005592346,
+ "rewards/rejected": -0.882026195526123,
+ "step": 840
+ },
+ {
+ "epoch": 0.5836408891940605,
+ "grad_norm": 35.08372363656115,
+ "learning_rate": 2.318840579710145e-07,
+ "logits/chosen": 0.29405680298805237,
+ "logits/rejected": 0.32297587394714355,
+ "logps/chosen": -199.6582794189453,
+ "logps/rejected": -201.277587890625,
+ "loss": 0.5613,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.3533500134944916,
+ "rewards/margins": 0.4787251353263855,
+ "rewards/rejected": -0.8320751190185547,
+ "step": 850
+ },
+ {
+ "epoch": 0.5905072525963436,
+ "grad_norm": 37.57503746222717,
+ "learning_rate": 2.2807017543859647e-07,
+ "logits/chosen": 0.30716609954833984,
+ "logits/rejected": 0.3208872079849243,
+ "logps/chosen": -190.15570068359375,
+ "logps/rejected": -197.3380584716797,
+ "loss": 0.5404,
+ "rewards/accuracies": 0.7421875,
+ "rewards/chosen": -0.35594654083251953,
+ "rewards/margins": 0.5726853609085083,
+ "rewards/rejected": -0.9286319017410278,
+ "step": 860
+ },
+ {
+ "epoch": 0.5973736159986267,
+ "grad_norm": 38.24375810448517,
+ "learning_rate": 2.2425629290617847e-07,
+ "logits/chosen": 0.2411738932132721,
+ "logits/rejected": 0.32163745164871216,
+ "logps/chosen": -196.72198486328125,
+ "logps/rejected": -200.0342254638672,
+ "loss": 0.5283,
+ "rewards/accuracies": 0.7718750238418579,
+ "rewards/chosen": -0.26584312319755554,
+ "rewards/margins": 0.5993839502334595,
+ "rewards/rejected": -0.8652270436286926,
+ "step": 870
1316
+ },
1317
+ {
1318
+ "epoch": 0.6042399794009098,
1319
+ "grad_norm": 42.764478541441036,
1320
+ "learning_rate": 2.204424103737605e-07,
1321
+ "logits/chosen": 0.2489607334136963,
1322
+ "logits/rejected": 0.32214033603668213,
1323
+ "logps/chosen": -193.22962951660156,
1324
+ "logps/rejected": -198.15139770507812,
1325
+ "loss": 0.5364,
1326
+ "rewards/accuracies": 0.7265625596046448,
1327
+ "rewards/chosen": -0.2991304099559784,
1328
+ "rewards/margins": 0.5733505487442017,
1329
+ "rewards/rejected": -0.8724809885025024,
1330
+ "step": 880
1331
+ },
1332
+ {
1333
+ "epoch": 0.6111063428031929,
1334
+ "grad_norm": 36.23455470329296,
1335
+ "learning_rate": 2.1662852784134248e-07,
1336
+ "logits/chosen": 0.2716100215911865,
1337
+ "logits/rejected": 0.2950877547264099,
1338
+ "logps/chosen": -200.77000427246094,
1339
+ "logps/rejected": -207.59774780273438,
1340
+ "loss": 0.5617,
1341
+ "rewards/accuracies": 0.7203125357627869,
1342
+ "rewards/chosen": -0.35562682151794434,
1343
+ "rewards/margins": 0.5303334593772888,
1344
+ "rewards/rejected": -0.8859602808952332,
1345
+ "step": 890
1346
+ },
1347
+ {
1348
+ "epoch": 0.617972706205476,
1349
+ "grad_norm": 36.85079943729679,
1350
+ "learning_rate": 2.1281464530892448e-07,
1351
+ "logits/chosen": 0.2935597598552704,
1352
+ "logits/rejected": 0.29276734590530396,
1353
+ "logps/chosen": -199.0962371826172,
1354
+ "logps/rejected": -208.84475708007812,
1355
+ "loss": 0.5388,
1356
+ "rewards/accuracies": 0.753125011920929,
1357
+ "rewards/chosen": -0.3592212498188019,
1358
+ "rewards/margins": 0.5828565955162048,
1359
+ "rewards/rejected": -0.9420778751373291,
1360
+ "step": 900
1361
+ },
1362
+ {
1363
+ "epoch": 0.624839069607759,
1364
+ "grad_norm": 38.47165659930442,
1365
+ "learning_rate": 2.0900076277650646e-07,
1366
+ "logits/chosen": 0.322465717792511,
1367
+ "logits/rejected": 0.35115572810173035,
1368
+ "logps/chosen": -197.81039428710938,
1369
+ "logps/rejected": -203.60789489746094,
1370
+ "loss": 0.538,
1371
+ "rewards/accuracies": 0.721875011920929,
1372
+ "rewards/chosen": -0.35633325576782227,
1373
+ "rewards/margins": 0.5937191247940063,
1374
+ "rewards/rejected": -0.9500523805618286,
1375
+ "step": 910
1376
+ },
1377
+ {
1378
+ "epoch": 0.631705433010042,
1379
+ "grad_norm": 37.96068046141611,
1380
+ "learning_rate": 2.051868802440885e-07,
1381
+ "logits/chosen": 0.2791150212287903,
1382
+ "logits/rejected": 0.31407302618026733,
1383
+ "logps/chosen": -205.3975372314453,
1384
+ "logps/rejected": -211.5294189453125,
1385
+ "loss": 0.5427,
1386
+ "rewards/accuracies": 0.7312500476837158,
1387
+ "rewards/chosen": -0.3240187168121338,
1388
+ "rewards/margins": 0.5994824767112732,
1389
+ "rewards/rejected": -0.923501193523407,
1390
+ "step": 920
1391
+ },
1392
+ {
1393
+ "epoch": 0.6385717964123251,
1394
+ "grad_norm": 36.87690959534342,
1395
+ "learning_rate": 2.0137299771167047e-07,
1396
+ "logits/chosen": 0.2880761921405792,
1397
+ "logits/rejected": 0.30180174112319946,
1398
+ "logps/chosen": -199.66058349609375,
1399
+ "logps/rejected": -207.59580993652344,
1400
+ "loss": 0.5025,
1401
+ "rewards/accuracies": 0.78125,
1402
+ "rewards/chosen": -0.3255097270011902,
1403
+ "rewards/margins": 0.6759095191955566,
1404
+ "rewards/rejected": -1.0014193058013916,
1405
+ "step": 930
1406
+ },
1407
+ {
1408
+ "epoch": 0.6454381598146082,
1409
+ "grad_norm": 34.108839062919095,
1410
+ "learning_rate": 1.9755911517925247e-07,
1411
+ "logits/chosen": 0.23794284462928772,
1412
+ "logits/rejected": 0.2611067295074463,
1413
+ "logps/chosen": -195.78970336914062,
1414
+ "logps/rejected": -200.48919677734375,
1415
+ "loss": 0.5416,
1416
+ "rewards/accuracies": 0.7156250476837158,
1417
+ "rewards/chosen": -0.333574116230011,
1418
+ "rewards/margins": 0.5876989364624023,
1419
+ "rewards/rejected": -0.9212730526924133,
1420
+ "step": 940
1421
+ },
1422
+ {
1423
+ "epoch": 0.6523045232168913,
1424
+ "grad_norm": 42.487643110620105,
1425
+ "learning_rate": 1.9374523264683445e-07,
1426
+ "logits/chosen": 0.21556894481182098,
1427
+ "logits/rejected": 0.3087506890296936,
1428
+ "logps/chosen": -203.08071899414062,
1429
+ "logps/rejected": -204.42138671875,
1430
+ "loss": 0.5358,
1431
+ "rewards/accuracies": 0.7281249761581421,
1432
+ "rewards/chosen": -0.3193817734718323,
1433
+ "rewards/margins": 0.6358938813209534,
1434
+ "rewards/rejected": -0.9552755951881409,
1435
+ "step": 950
1436
+ },
1437
+ {
1438
+ "epoch": 0.6591708866191743,
1439
+ "grad_norm": 44.89635861946585,
1440
+ "learning_rate": 1.8993135011441648e-07,
1441
+ "logits/chosen": 0.292077898979187,
1442
+ "logits/rejected": 0.3146917521953583,
1443
+ "logps/chosen": -194.001220703125,
1444
+ "logps/rejected": -202.2600860595703,
1445
+ "loss": 0.5448,
1446
+ "rewards/accuracies": 0.734375,
1447
+ "rewards/chosen": -0.3208439350128174,
1448
+ "rewards/margins": 0.5795982480049133,
1449
+ "rewards/rejected": -0.9004421830177307,
1450
+ "step": 960
1451
+ },
1452
+ {
1453
+ "epoch": 0.6660372500214574,
1454
+ "grad_norm": 33.551478970398286,
1455
+ "learning_rate": 1.8611746758199848e-07,
1456
+ "logits/chosen": 0.2604063153266907,
1457
+ "logits/rejected": 0.2851104736328125,
1458
+ "logps/chosen": -192.01268005371094,
1459
+ "logps/rejected": -199.5753631591797,
1460
+ "loss": 0.5278,
1461
+ "rewards/accuracies": 0.75,
1462
+ "rewards/chosen": -0.29537859559059143,
1463
+ "rewards/margins": 0.5901771187782288,
1464
+ "rewards/rejected": -0.8855556845664978,
1465
+ "step": 970
1466
+ },
1467
+ {
1468
+ "epoch": 0.6729036134237405,
1469
+ "grad_norm": 35.66870501940745,
1470
+ "learning_rate": 1.8230358504958046e-07,
1471
+ "logits/chosen": 0.28516054153442383,
1472
+ "logits/rejected": 0.30551978945732117,
1473
+ "logps/chosen": -191.11451721191406,
1474
+ "logps/rejected": -200.1798095703125,
1475
+ "loss": 0.5325,
1476
+ "rewards/accuracies": 0.7718750238418579,
1477
+ "rewards/chosen": -0.2689625322818756,
1478
+ "rewards/margins": 0.6204273104667664,
1479
+ "rewards/rejected": -0.8893898129463196,
1480
+ "step": 980
1481
+ },
1482
+ {
1483
+ "epoch": 0.6797699768260235,
1484
+ "grad_norm": 56.47041800124252,
1485
+ "learning_rate": 1.7848970251716246e-07,
1486
+ "logits/chosen": 0.3133455812931061,
1487
+ "logits/rejected": 0.3184555470943451,
1488
+ "logps/chosen": -191.03782653808594,
1489
+ "logps/rejected": -200.31556701660156,
1490
+ "loss": 0.5553,
1491
+ "rewards/accuracies": 0.746874988079071,
1492
+ "rewards/chosen": -0.3917931020259857,
1493
+ "rewards/margins": 0.5545149445533752,
1494
+ "rewards/rejected": -0.9463080763816833,
1495
+ "step": 990
1496
+ },
1497
+ {
1498
+ "epoch": 0.6866363402283066,
1499
+ "grad_norm": 35.555828137231536,
1500
+ "learning_rate": 1.7467581998474446e-07,
1501
+ "logits/chosen": 0.2640990912914276,
1502
+ "logits/rejected": 0.2870061993598938,
1503
+ "logps/chosen": -200.83248901367188,
1504
+ "logps/rejected": -213.00466918945312,
1505
+ "loss": 0.5147,
1506
+ "rewards/accuracies": 0.760937511920929,
1507
+ "rewards/chosen": -0.3015330135822296,
1508
+ "rewards/margins": 0.6684325933456421,
1509
+ "rewards/rejected": -0.9699656367301941,
1510
+ "step": 1000
1511
+ },
1512
+ {
1513
+ "epoch": 0.6935027036305896,
1514
+ "grad_norm": 34.088214821820934,
1515
+ "learning_rate": 1.7086193745232647e-07,
1516
+ "logits/chosen": 0.2753002345561981,
1517
+ "logits/rejected": 0.3184780478477478,
1518
+ "logps/chosen": -185.6627960205078,
1519
+ "logps/rejected": -191.78610229492188,
1520
+ "loss": 0.5326,
1521
+ "rewards/accuracies": 0.7406250238418579,
1522
+ "rewards/chosen": -0.3113631010055542,
1523
+ "rewards/margins": 0.584636390209198,
1524
+ "rewards/rejected": -0.8959994912147522,
1525
+ "step": 1010
1526
+ },
1527
+ {
1528
+ "epoch": 0.7003690670328727,
1529
+ "grad_norm": 41.3910211936423,
1530
+ "learning_rate": 1.6704805491990844e-07,
1531
+ "logits/chosen": 0.21735849976539612,
1532
+ "logits/rejected": 0.328865647315979,
1533
+ "logps/chosen": -196.0897674560547,
1534
+ "logps/rejected": -198.31881713867188,
1535
+ "loss": 0.5303,
1536
+ "rewards/accuracies": 0.7578125596046448,
1537
+ "rewards/chosen": -0.3108276128768921,
1538
+ "rewards/margins": 0.5797023773193359,
1539
+ "rewards/rejected": -0.8905300498008728,
1540
+ "step": 1020
1541
+ },
1542
+ {
1543
+ "epoch": 0.7072354304351558,
1544
+ "grad_norm": 35.79945052953454,
1545
+ "learning_rate": 1.6323417238749045e-07,
1546
+ "logits/chosen": 0.21715006232261658,
1547
+ "logits/rejected": 0.2603926956653595,
1548
+ "logps/chosen": -191.06532287597656,
1549
+ "logps/rejected": -196.79811096191406,
1550
+ "loss": 0.5397,
1551
+ "rewards/accuracies": 0.7359375357627869,
1552
+ "rewards/chosen": -0.3505193889141083,
1553
+ "rewards/margins": 0.5923134088516235,
1554
+ "rewards/rejected": -0.9428327083587646,
1555
+ "step": 1030
1556
+ },
1557
+ {
1558
+ "epoch": 0.7141017938374389,
1559
+ "grad_norm": 38.89985342660555,
1560
+ "learning_rate": 1.5942028985507245e-07,
1561
+ "logits/chosen": 0.2547008693218231,
1562
+ "logits/rejected": 0.27041909098625183,
1563
+ "logps/chosen": -192.95919799804688,
1564
+ "logps/rejected": -201.5959014892578,
1565
+ "loss": 0.5401,
1566
+ "rewards/accuracies": 0.7515625357627869,
1567
+ "rewards/chosen": -0.31962740421295166,
1568
+ "rewards/margins": 0.6006041765213013,
1569
+ "rewards/rejected": -0.9202315211296082,
1570
+ "step": 1040
1571
+ },
1572
+ {
1573
+ "epoch": 0.720968157239722,
1574
+ "grad_norm": 38.89083856333277,
1575
+ "learning_rate": 1.5560640732265446e-07,
1576
+ "logits/chosen": 0.22286555171012878,
1577
+ "logits/rejected": 0.2649829685688019,
1578
+ "logps/chosen": -195.25892639160156,
1579
+ "logps/rejected": -201.61752319335938,
1580
+ "loss": 0.5287,
1581
+ "rewards/accuracies": 0.7562500238418579,
1582
+ "rewards/chosen": -0.2955860495567322,
1583
+ "rewards/margins": 0.6011725068092346,
1584
+ "rewards/rejected": -0.8967585563659668,
1585
+ "step": 1050
1586
+ },
1587
+ {
1588
+ "epoch": 0.7278345206420049,
1589
+ "grad_norm": 38.82078220393796,
1590
+ "learning_rate": 1.5179252479023646e-07,
1591
+ "logits/chosen": 0.29512226581573486,
1592
+ "logits/rejected": 0.2847123146057129,
1593
+ "logps/chosen": -189.52247619628906,
1594
+ "logps/rejected": -195.29598999023438,
1595
+ "loss": 0.5515,
1596
+ "rewards/accuracies": 0.7124999761581421,
1597
+ "rewards/chosen": -0.2878785729408264,
1598
+ "rewards/margins": 0.5302765965461731,
1599
+ "rewards/rejected": -0.8181551694869995,
1600
+ "step": 1060
1601
+ },
1602
+ {
1603
+ "epoch": 0.734700884044288,
1604
+ "grad_norm": 47.26245042175549,
1605
+ "learning_rate": 1.4797864225781844e-07,
1606
+ "logits/chosen": 0.2918851971626282,
1607
+ "logits/rejected": 0.32630887627601624,
1608
+ "logps/chosen": -195.94786071777344,
1609
+ "logps/rejected": -203.26979064941406,
1610
+ "loss": 0.5169,
1611
+ "rewards/accuracies": 0.7484375238418579,
1612
+ "rewards/chosen": -0.3223276436328888,
1613
+ "rewards/margins": 0.6719238758087158,
1614
+ "rewards/rejected": -0.9942514896392822,
1615
+ "step": 1070
1616
+ },
1617
+ {
1618
+ "epoch": 0.7415672474465711,
1619
+ "grad_norm": 38.362363344324436,
1620
+ "learning_rate": 1.4416475972540047e-07,
1621
+ "logits/chosen": 0.2812820076942444,
1622
+ "logits/rejected": 0.3306478261947632,
1623
+ "logps/chosen": -195.67994689941406,
1624
+ "logps/rejected": -203.43797302246094,
1625
+ "loss": 0.5423,
1626
+ "rewards/accuracies": 0.737500011920929,
1627
+ "rewards/chosen": -0.34005221724510193,
1628
+ "rewards/margins": 0.5954278111457825,
1629
+ "rewards/rejected": -0.9354800581932068,
1630
+ "step": 1080
1631
+ },
1632
+ {
1633
+ "epoch": 0.7484336108488542,
1634
+ "grad_norm": 33.005125056199134,
1635
+ "learning_rate": 1.4035087719298244e-07,
1636
+ "logits/chosen": 0.2390434741973877,
1637
+ "logits/rejected": 0.33480697870254517,
1638
+ "logps/chosen": -198.01687622070312,
1639
+ "logps/rejected": -199.43064880371094,
1640
+ "loss": 0.5251,
1641
+ "rewards/accuracies": 0.776562511920929,
1642
+ "rewards/chosen": -0.2991839051246643,
1643
+ "rewards/margins": 0.6372097134590149,
1644
+ "rewards/rejected": -0.9363937377929688,
1645
+ "step": 1090
1646
+ },
1647
+ {
1648
+ "epoch": 0.7552999742511373,
1649
+ "grad_norm": 33.828040981930215,
1650
+ "learning_rate": 1.3653699466056445e-07,
1651
+ "logits/chosen": 0.27923381328582764,
1652
+ "logits/rejected": 0.29023945331573486,
1653
+ "logps/chosen": -206.89694213867188,
1654
+ "logps/rejected": -214.03880310058594,
1655
+ "loss": 0.4939,
1656
+ "rewards/accuracies": 0.7640625238418579,
1657
+ "rewards/chosen": -0.30125996470451355,
1658
+ "rewards/margins": 0.7326390147209167,
1659
+ "rewards/rejected": -1.033898949623108,
1660
+ "step": 1100
1661
+ },
1662
+ {
1663
+ "epoch": 0.7621663376534203,
1664
+ "grad_norm": 31.87482658749529,
1665
+ "learning_rate": 1.3272311212814645e-07,
1666
+ "logits/chosen": 0.2957872152328491,
1667
+ "logits/rejected": 0.3066104054450989,
1668
+ "logps/chosen": -193.53025817871094,
1669
+ "logps/rejected": -201.29710388183594,
1670
+ "loss": 0.5141,
1671
+ "rewards/accuracies": 0.7546875476837158,
1672
+ "rewards/chosen": -0.2906218469142914,
1673
+ "rewards/margins": 0.6594946980476379,
1674
+ "rewards/rejected": -0.9501165151596069,
1675
+ "step": 1110
1676
+ },
1677
+ {
1678
+ "epoch": 0.7690327010557034,
1679
+ "grad_norm": 37.01925333474489,
1680
+ "learning_rate": 1.2890922959572845e-07,
1681
+ "logits/chosen": 0.23377490043640137,
1682
+ "logits/rejected": 0.28193050622940063,
1683
+ "logps/chosen": -203.5383758544922,
1684
+ "logps/rejected": -210.682373046875,
1685
+ "loss": 0.5309,
1686
+ "rewards/accuracies": 0.7312500476837158,
1687
+ "rewards/chosen": -0.3552318215370178,
1688
+ "rewards/margins": 0.6278497576713562,
1689
+ "rewards/rejected": -0.9830816388130188,
1690
+ "step": 1120
1691
+ },
1692
+ {
1693
+ "epoch": 0.7758990644579864,
1694
+ "grad_norm": 38.86521392049953,
1695
+ "learning_rate": 1.2509534706331046e-07,
1696
+ "logits/chosen": 0.25305452942848206,
1697
+ "logits/rejected": 0.250664085149765,
1698
+ "logps/chosen": -193.17884826660156,
1699
+ "logps/rejected": -205.3461456298828,
1700
+ "loss": 0.5151,
1701
+ "rewards/accuracies": 0.7734375,
1702
+ "rewards/chosen": -0.3200224041938782,
1703
+ "rewards/margins": 0.7115429639816284,
1704
+ "rewards/rejected": -1.0315654277801514,
1705
+ "step": 1130
1706
+ },
1707
+ {
1708
+ "epoch": 0.7827654278602695,
1709
+ "grad_norm": 41.16862629359382,
1710
+ "learning_rate": 1.2128146453089243e-07,
1711
+ "logits/chosen": 0.24563747644424438,
1712
+ "logits/rejected": 0.2821294665336609,
1713
+ "logps/chosen": -192.35116577148438,
1714
+ "logps/rejected": -197.296630859375,
1715
+ "loss": 0.5208,
1716
+ "rewards/accuracies": 0.7578125596046448,
1717
+ "rewards/chosen": -0.28378814458847046,
1718
+ "rewards/margins": 0.667680025100708,
1719
+ "rewards/rejected": -0.9514681696891785,
1720
+ "step": 1140
1721
+ },
1722
+ {
1723
+ "epoch": 0.7896317912625526,
1724
+ "grad_norm": 42.06746375990814,
1725
+ "learning_rate": 1.1746758199847444e-07,
1726
+ "logits/chosen": 0.22848859429359436,
1727
+ "logits/rejected": 0.3029937744140625,
1728
+ "logps/chosen": -193.92227172851562,
1729
+ "logps/rejected": -200.07699584960938,
1730
+ "loss": 0.52,
1731
+ "rewards/accuracies": 0.7562500238418579,
1732
+ "rewards/chosen": -0.3132557272911072,
1733
+ "rewards/margins": 0.6495946049690247,
1734
+ "rewards/rejected": -0.9628503322601318,
1735
+ "step": 1150
1736
+ },
1737
+ {
1738
+ "epoch": 0.7964981546648356,
1739
+ "grad_norm": 32.39256423163034,
1740
+ "learning_rate": 1.1365369946605644e-07,
1741
+ "logits/chosen": 0.29020410776138306,
1742
+ "logits/rejected": 0.29822224378585815,
1743
+ "logps/chosen": -189.2175750732422,
1744
+ "logps/rejected": -198.12094116210938,
1745
+ "loss": 0.5272,
1746
+ "rewards/accuracies": 0.7500000596046448,
1747
+ "rewards/chosen": -0.3057502508163452,
1748
+ "rewards/margins": 0.6393308043479919,
1749
+ "rewards/rejected": -0.9450810551643372,
1750
+ "step": 1160
1751
+ },
1752
+ {
1753
+ "epoch": 0.8033645180671187,
1754
+ "grad_norm": 34.13298433674702,
1755
+ "learning_rate": 1.0983981693363843e-07,
1756
+ "logits/chosen": 0.195723757147789,
1757
+ "logits/rejected": 0.269298255443573,
1758
+ "logps/chosen": -196.38954162597656,
1759
+ "logps/rejected": -202.385986328125,
1760
+ "loss": 0.5052,
1761
+ "rewards/accuracies": 0.760937511920929,
1762
+ "rewards/chosen": -0.29148977994918823,
1763
+ "rewards/margins": 0.673367977142334,
1764
+ "rewards/rejected": -0.9648576974868774,
1765
+ "step": 1170
1766
+ },
1767
+ {
1768
+ "epoch": 0.8102308814694018,
1769
+ "grad_norm": 41.904351029623406,
1770
+ "learning_rate": 1.0602593440122045e-07,
1771
+ "logits/chosen": 0.26145869493484497,
1772
+ "logits/rejected": 0.304781049489975,
1773
+ "logps/chosen": -196.5819091796875,
1774
+ "logps/rejected": -206.65396118164062,
1775
+ "loss": 0.5308,
1776
+ "rewards/accuracies": 0.7718750238418579,
1777
+ "rewards/chosen": -0.3446430563926697,
1778
+ "rewards/margins": 0.6291292309761047,
1779
+ "rewards/rejected": -0.973772406578064,
1780
+ "step": 1180
1781
+ },
1782
+ {
1783
+ "epoch": 0.8170972448716848,
1784
+ "grad_norm": 36.5024658205158,
1785
+ "learning_rate": 1.0221205186880244e-07,
1786
+ "logits/chosen": 0.22576144337654114,
1787
+ "logits/rejected": 0.2602446973323822,
1788
+ "logps/chosen": -188.89349365234375,
1789
+ "logps/rejected": -195.39523315429688,
1790
+ "loss": 0.5198,
1791
+ "rewards/accuracies": 0.7578125,
1792
+ "rewards/chosen": -0.23121029138565063,
1793
+ "rewards/margins": 0.6549311876296997,
1794
+ "rewards/rejected": -0.8861414790153503,
1795
+ "step": 1190
1796
+ },
1797
+ {
1798
+ "epoch": 0.8239636082739679,
1799
+ "grad_norm": 42.13359318077711,
1800
+ "learning_rate": 9.839816933638444e-08,
1801
+ "logits/chosen": 0.2552095055580139,
1802
+ "logits/rejected": 0.3024769127368927,
1803
+ "logps/chosen": -194.66673278808594,
1804
+ "logps/rejected": -198.9243621826172,
1805
+ "loss": 0.5135,
1806
+ "rewards/accuracies": 0.7640625238418579,
1807
+ "rewards/chosen": -0.2669564187526703,
1808
+ "rewards/margins": 0.6804448962211609,
1809
+ "rewards/rejected": -0.9474013447761536,
1810
+ "step": 1200
1811
+ },
1812
+ {
1813
+ "epoch": 0.830829971676251,
1814
+ "grad_norm": 39.317898860436635,
1815
+ "learning_rate": 9.458428680396643e-08,
1816
+ "logits/chosen": 0.20807792246341705,
1817
+ "logits/rejected": 0.2506875991821289,
1818
+ "logps/chosen": -194.7408905029297,
1819
+ "logps/rejected": -202.78555297851562,
1820
+ "loss": 0.5341,
1821
+ "rewards/accuracies": 0.7359374761581421,
1822
+ "rewards/chosen": -0.32548367977142334,
1823
+ "rewards/margins": 0.6120297908782959,
1824
+ "rewards/rejected": -0.9375134706497192,
1825
+ "step": 1210
1826
+ },
1827
+ {
1828
+ "epoch": 0.837696335078534,
1829
+ "grad_norm": 39.302270375674155,
1830
+ "learning_rate": 9.077040427154843e-08,
1831
+ "logits/chosen": 0.2483367919921875,
1832
+ "logits/rejected": 0.27390342950820923,
1833
+ "logps/chosen": -197.6544647216797,
1834
+ "logps/rejected": -204.05047607421875,
1835
+ "loss": 0.51,
1836
+ "rewards/accuracies": 0.762499988079071,
1837
+ "rewards/chosen": -0.2905549108982086,
1838
+ "rewards/margins": 0.6854307651519775,
1839
+ "rewards/rejected": -0.9759857058525085,
1840
+ "step": 1220
1841
+ },
1842
+ {
1843
+ "epoch": 0.8445626984808171,
1844
+ "grad_norm": 36.66056019529055,
1845
+ "learning_rate": 8.695652173913042e-08,
1846
+ "logits/chosen": 0.28719934821128845,
1847
+ "logits/rejected": 0.3099461495876312,
1848
+ "logps/chosen": -186.6349639892578,
1849
+ "logps/rejected": -193.26979064941406,
1850
+ "loss": 0.5175,
1851
+ "rewards/accuracies": 0.765625,
1852
+ "rewards/chosen": -0.388496994972229,
1853
+ "rewards/margins": 0.6531878709793091,
1854
+ "rewards/rejected": -1.041684865951538,
1855
+ "step": 1230
1856
+ },
1857
+ {
1858
+ "epoch": 0.8514290618831002,
1859
+ "grad_norm": 36.07042098698609,
1860
+ "learning_rate": 8.314263920671243e-08,
1861
+ "logits/chosen": 0.2238319218158722,
1862
+ "logits/rejected": 0.2526761591434479,
1863
+ "logps/chosen": -200.8705291748047,
1864
+ "logps/rejected": -209.2767791748047,
1865
+ "loss": 0.5419,
1866
+ "rewards/accuracies": 0.7515625357627869,
1867
+ "rewards/chosen": -0.359815776348114,
1868
+ "rewards/margins": 0.5757598280906677,
1869
+ "rewards/rejected": -0.9355756044387817,
1870
+ "step": 1240
1871
+ },
1872
+ {
1873
+ "epoch": 0.8582954252853833,
1874
+ "grad_norm": 37.98916737589895,
1875
+ "learning_rate": 7.932875667429442e-08,
1876
+ "logits/chosen": 0.2867482602596283,
1877
+ "logits/rejected": 0.2855786979198456,
1878
+ "logps/chosen": -200.7849578857422,
1879
+ "logps/rejected": -209.64694213867188,
1880
+ "loss": 0.5386,
1881
+ "rewards/accuracies": 0.7484375238418579,
1882
+ "rewards/chosen": -0.3794117867946625,
1883
+ "rewards/margins": 0.623035728931427,
1884
+ "rewards/rejected": -1.002447485923767,
1885
+ "step": 1250
1886
+ },
1887
+ {
1888
+ "epoch": 0.8651617886876662,
1889
+ "grad_norm": 40.53269809980178,
1890
+ "learning_rate": 7.551487414187643e-08,
1891
+ "logits/chosen": 0.25138917565345764,
1892
+ "logits/rejected": 0.2744777798652649,
1893
+ "logps/chosen": -199.57286071777344,
1894
+ "logps/rejected": -204.9058074951172,
1895
+ "loss": 0.5072,
1896
+ "rewards/accuracies": 0.7562500238418579,
1897
+ "rewards/chosen": -0.28435054421424866,
1898
+ "rewards/margins": 0.6674630641937256,
1899
+ "rewards/rejected": -0.9518135786056519,
1900
+ "step": 1260
1901
+ },
1902
+ {
1903
+ "epoch": 0.8720281520899493,
1904
+ "grad_norm": 44.84817578049332,
1905
+ "learning_rate": 7.170099160945843e-08,
1906
+ "logits/chosen": 0.2759166359901428,
1907
+ "logits/rejected": 0.32305389642715454,
1908
+ "logps/chosen": -202.01171875,
1909
+ "logps/rejected": -206.69354248046875,
1910
+ "loss": 0.5378,
1911
+ "rewards/accuracies": 0.7171875238418579,
1912
+ "rewards/chosen": -0.3561447262763977,
1913
+ "rewards/margins": 0.609983503818512,
1914
+ "rewards/rejected": -0.9661281704902649,
1915
+ "step": 1270
1916
+ },
1917
+ {
1918
+ "epoch": 0.8788945154922324,
1919
+ "grad_norm": 33.239181880901405,
1920
+ "learning_rate": 6.788710907704043e-08,
1921
+ "logits/chosen": 0.27904975414276123,
1922
+ "logits/rejected": 0.3506329655647278,
1923
+ "logps/chosen": -190.4373779296875,
1924
+ "logps/rejected": -194.3594512939453,
1925
+ "loss": 0.5169,
1926
+ "rewards/accuracies": 0.7468750476837158,
1927
+ "rewards/chosen": -0.33293962478637695,
1928
+ "rewards/margins": 0.6674720048904419,
1929
+ "rewards/rejected": -1.0004115104675293,
1930
+ "step": 1280
1931
+ },
1932
+ {
1933
+ "epoch": 0.8857608788945155,
1934
+ "grad_norm": 42.24278587548862,
1935
+ "learning_rate": 6.407322654462242e-08,
1936
+ "logits/chosen": 0.22125577926635742,
1937
+ "logits/rejected": 0.23234713077545166,
1938
+ "logps/chosen": -196.283935546875,
1939
+ "logps/rejected": -206.2383270263672,
1940
+ "loss": 0.4972,
1941
+ "rewards/accuracies": 0.7906249761581421,
1942
+ "rewards/chosen": -0.2478475719690323,
1943
+ "rewards/margins": 0.7637631893157959,
1944
+ "rewards/rejected": -1.011610746383667,
1945
+ "step": 1290
1946
+ },
1947
+ {
1948
+ "epoch": 0.8926272422967986,
1949
+ "grad_norm": 48.14687039805379,
1950
+ "learning_rate": 6.025934401220442e-08,
1951
+ "logits/chosen": 0.2196926772594452,
1952
+ "logits/rejected": 0.26132142543792725,
1953
+ "logps/chosen": -199.7481231689453,
1954
+ "logps/rejected": -205.6083526611328,
1955
+ "loss": 0.5156,
1956
+ "rewards/accuracies": 0.7640625238418579,
1957
+ "rewards/chosen": -0.32518845796585083,
1958
+ "rewards/margins": 0.6625872850418091,
1959
+ "rewards/rejected": -0.9877756834030151,
1960
+ "step": 1300
1961
+ },
1962
+ {
1963
+ "epoch": 0.8994936056990817,
1964
+ "grad_norm": 40.136271946129845,
1965
+ "learning_rate": 5.644546147978642e-08,
1966
+ "logits/chosen": 0.2690100073814392,
1967
+ "logits/rejected": 0.3203776478767395,
1968
+ "logps/chosen": -196.69924926757812,
1969
+ "logps/rejected": -207.5710906982422,
1970
+ "loss": 0.517,
1971
+ "rewards/accuracies": 0.760937511920929,
1972
+ "rewards/chosen": -0.3163500428199768,
1973
+ "rewards/margins": 0.6733758449554443,
1974
+ "rewards/rejected": -0.9897259473800659,
1975
+ "step": 1310
1976
+ },
1977
+ {
1978
+ "epoch": 0.9063599691013647,
1979
+ "grad_norm": 38.232705361235354,
1980
+ "learning_rate": 5.2631578947368416e-08,
1981
+ "logits/chosen": 0.22911009192466736,
1982
+ "logits/rejected": 0.2933771312236786,
1983
+ "logps/chosen": -193.46011352539062,
1984
+ "logps/rejected": -198.90769958496094,
1985
+ "loss": 0.5129,
1986
+ "rewards/accuracies": 0.768750011920929,
1987
+ "rewards/chosen": -0.31777265667915344,
1988
+ "rewards/margins": 0.6428579688072205,
1989
+ "rewards/rejected": -0.9606305956840515,
1990
+ "step": 1320
1991
+ },
1992
+ {
1993
+ "epoch": 0.9132263325036477,
1994
+ "grad_norm": 32.32629597775676,
1995
+ "learning_rate": 4.881769641495042e-08,
1996
+ "logits/chosen": 0.2851574122905731,
1997
+ "logits/rejected": 0.3311702311038971,
1998
+ "logps/chosen": -190.674560546875,
1999
+ "logps/rejected": -198.54489135742188,
2000
+ "loss": 0.518,
2001
+ "rewards/accuracies": 0.7562500238418579,
2002
+ "rewards/chosen": -0.36391162872314453,
2003
+ "rewards/margins": 0.6667385101318359,
2004
+ "rewards/rejected": -1.0306501388549805,
2005
+ "step": 1330
2006
+ },
2007
+ {
2008
+ "epoch": 0.9200926959059308,
2009
+ "grad_norm": 34.564104656300344,
2010
+ "learning_rate": 4.5003813882532416e-08,
2011
+ "logits/chosen": 0.2709936499595642,
2012
+ "logits/rejected": 0.29850390553474426,
2013
+ "logps/chosen": -190.10250854492188,
2014
+ "logps/rejected": -201.5867919921875,
2015
+ "loss": 0.5245,
2016
+ "rewards/accuracies": 0.754687488079071,
2017
+ "rewards/chosen": -0.33972710371017456,
2018
+ "rewards/margins": 0.6379141807556152,
2019
+ "rewards/rejected": -0.9776412844657898,
2020
+ "step": 1340
2021
+ },
2022
+ {
2023
+ "epoch": 0.9269590593082139,
2024
+ "grad_norm": 40.20432402146685,
2025
+ "learning_rate": 4.118993135011441e-08,
2026
+ "logits/chosen": 0.31429168581962585,
2027
+ "logits/rejected": 0.3014827370643616,
2028
+ "logps/chosen": -193.68734741210938,
2029
+ "logps/rejected": -206.73443603515625,
2030
+ "loss": 0.5043,
2031
+ "rewards/accuracies": 0.7421875,
2032
+ "rewards/chosen": -0.33307623863220215,
2033
+ "rewards/margins": 0.733502984046936,
2034
+ "rewards/rejected": -1.0665792226791382,
2035
+ "step": 1350
2036
+ },
2037
+ {
2038
+ "epoch": 0.933825422710497,
2039
+ "grad_norm": 42.378701373805164,
2040
+ "learning_rate": 3.737604881769641e-08,
2041
+ "logits/chosen": 0.26147449016571045,
2042
+ "logits/rejected": 0.32908618450164795,
2043
+ "logps/chosen": -202.1975860595703,
2044
+ "logps/rejected": -206.84689331054688,
2045
+ "loss": 0.5196,
2046
+ "rewards/accuracies": 0.754687488079071,
2047
+ "rewards/chosen": -0.30808600783348083,
2048
+ "rewards/margins": 0.6519337296485901,
2049
+ "rewards/rejected": -0.9600198268890381,
2050
+ "step": 1360
2051
+ },
2052
+ {
2053
+ "epoch": 0.94069178611278,
2054
+ "grad_norm": 35.41777915691115,
2055
+ "learning_rate": 3.356216628527841e-08,
2056
+ "logits/chosen": 0.24308253824710846,
2057
+ "logits/rejected": 0.28230151534080505,
2058
+ "logps/chosen": -202.4017333984375,
2059
+ "logps/rejected": -206.34548950195312,
2060
+ "loss": 0.5305,
2061
+ "rewards/accuracies": 0.745312511920929,
2062
+ "rewards/chosen": -0.364469051361084,
2063
+ "rewards/margins": 0.6204385161399841,
2064
+ "rewards/rejected": -0.9849075078964233,
2065
+ "step": 1370
2066
+ },
2067
+ {
2068
+ "epoch": 0.9475581495150631,
2069
+ "grad_norm": 31.86221090386857,
2070
+ "learning_rate": 2.9748283752860413e-08,
2071
+ "logits/chosen": 0.2843090295791626,
2072
+ "logits/rejected": 0.316922128200531,
2073
+ "logps/chosen": -189.54446411132812,
2074
+ "logps/rejected": -199.91403198242188,
2075
+ "loss": 0.4831,
2076
+ "rewards/accuracies": 0.792187511920929,
2077
+ "rewards/chosen": -0.2866865396499634,
2078
+ "rewards/margins": 0.7708964347839355,
2079
+ "rewards/rejected": -1.0575830936431885,
2080
+ "step": 1380
2081
+ },
2082
+ {
2083
+ "epoch": 0.9544245129173462,
2084
+ "grad_norm": 36.53519638891643,
2085
+ "learning_rate": 2.593440122044241e-08,
2086
+ "logits/chosen": 0.295163631439209,
2087
+ "logits/rejected": 0.27683940529823303,
2088
+ "logps/chosen": -198.69625854492188,
2089
+ "logps/rejected": -206.66403198242188,
2090
+ "loss": 0.491,
2091
+ "rewards/accuracies": 0.776562511920929,
2092
+ "rewards/chosen": -0.2761452794075012,
2093
+ "rewards/margins": 0.7478234171867371,
2094
+ "rewards/rejected": -1.0239686965942383,
2095
+ "step": 1390
2096
+ },
2097
+ {
2098
+ "epoch": 0.9612908763196292,
2099
+ "grad_norm": 35.87253035932275,
2100
+ "learning_rate": 2.212051868802441e-08,
2101
+ "logits/chosen": 0.2549622058868408,
2102
+ "logits/rejected": 0.2964940071105957,
2103
+ "logps/chosen": -191.95323181152344,
2104
+ "logps/rejected": -199.76100158691406,
2105
+ "loss": 0.521,
2106
+ "rewards/accuracies": 0.7421875,
2107
+ "rewards/chosen": -0.30250483751296997,
2108
+ "rewards/margins": 0.6729665994644165,
2109
+ "rewards/rejected": -0.9754714965820312,
2110
+ "step": 1400
2111
+ },
2112
+ {
2113
+ "epoch": 0.9681572397219123,
2114
+ "grad_norm": 36.42687734600846,
2115
+ "learning_rate": 1.8306636155606407e-08,
2116
+ "logits/chosen": 0.272115558385849,
2117
+ "logits/rejected": 0.32787230610847473,
2118
+ "logps/chosen": -199.0819091796875,
2119
+ "logps/rejected": -205.5025177001953,
2120
+ "loss": 0.5076,
2121
+ "rewards/accuracies": 0.7562500238418579,
2122
+ "rewards/chosen": -0.32656988501548767,
2123
+ "rewards/margins": 0.6987006068229675,
2124
+ "rewards/rejected": -1.0252704620361328,
2125
+ "step": 1410
2126
+ },
2127
+ {
2128
+ "epoch": 0.9750236031241953,
2129
+ "grad_norm": 32.32361768075354,
2130
+ "learning_rate": 1.4492753623188406e-08,
2131
+ "logits/chosen": 0.2514479458332062,
2132
+ "logits/rejected": 0.29168808460235596,
2133
+ "logps/chosen": -191.49757385253906,
2134
+ "logps/rejected": -199.6735076904297,
2135
+ "loss": 0.5287,
2136
+ "rewards/accuracies": 0.7593750357627869,
2137
+ "rewards/chosen": -0.34709036350250244,
2138
+ "rewards/margins": 0.6339684724807739,
2139
+ "rewards/rejected": -0.9810588955879211,
2140
+ "step": 1420
2141
+ },
2142
+ {
2143
+ "epoch": 0.9818899665264784,
2144
+ "grad_norm": 38.90237712846689,
2145
+ "learning_rate": 1.0678871090770404e-08,
2146
+ "logits/chosen": 0.2903396487236023,
2147
+ "logits/rejected": 0.3211938142776489,
2148
+ "logps/chosen": -199.82485961914062,
2149
+ "logps/rejected": -210.410400390625,
2150
+ "loss": 0.5281,
2151
+ "rewards/accuracies": 0.75,
2152
+ "rewards/chosen": -0.28799813985824585,
2153
+ "rewards/margins": 0.6520898342132568,
2154
+ "rewards/rejected": -0.9400879740715027,
2155
+ "step": 1430
2156
+ },
2157
+ {
2158
+ "epoch": 0.9887563299287615,
2159
+ "grad_norm": 36.50225194256435,
2160
+ "learning_rate": 6.864988558352402e-09,
2161
+ "logits/chosen": 0.2802445888519287,
2162
+ "logits/rejected": 0.32968616485595703,
2163
+ "logps/chosen": -182.9055633544922,
2164
+ "logps/rejected": -195.48397827148438,
2165
+ "loss": 0.5302,
2166
+ "rewards/accuracies": 0.7250000238418579,
2167
+ "rewards/chosen": -0.3680599331855774,
2168
+ "rewards/margins": 0.6278579831123352,
2169
+ "rewards/rejected": -0.9959178566932678,
2170
+ "step": 1440
2171
+ },
2172
+ {
2173
+ "epoch": 0.9956226933310446,
2174
+ "grad_norm": 37.0877105218469,
2175
+ "learning_rate": 3.0511060259344012e-09,
2176
+ "logits/chosen": 0.23157478868961334,
2177
+ "logits/rejected": 0.26915648579597473,
2178
+ "logps/chosen": -202.55470275878906,
2179
+ "logps/rejected": -210.82720947265625,
2180
+ "loss": 0.518,
2181
+ "rewards/accuracies": 0.7562500238418579,
2182
+ "rewards/chosen": -0.3523581624031067,
2183
+ "rewards/margins": 0.6836462616920471,
2184
+ "rewards/rejected": -1.0360045433044434,
2185
+ "step": 1450
2186
+ },
2187
+ {
2188
+ "epoch": 1.0,
2189
+ "step": 1457,
2190
+ "total_flos": 160669524688896.0,
2191
+ "train_loss": 0.5754778021889021,
2192
+ "train_runtime": 14476.1212,
2193
+ "train_samples_per_second": 6.439,
2194
+ "train_steps_per_second": 0.101
2195
+ }
2196
+ ],
2197
+ "logging_steps": 10,
2198
+ "max_steps": 1457,
2199
+ "num_input_tokens_seen": 0,
2200
+ "num_train_epochs": 1,
2201
+ "save_steps": 250,
2202
+ "stateful_callbacks": {
2203
+ "TrainerControl": {
2204
+ "args": {
2205
+ "should_epoch_stop": false,
2206
+ "should_evaluate": false,
2207
+ "should_log": false,
2208
+ "should_save": true,
2209
+ "should_training_stop": true
2210
+ },
2211
+ "attributes": {}
2212
+ }
2213
+ },
2214
+ "total_flos": 160669524688896.0,
2215
+ "train_batch_size": 1,
2216
+ "trial_name": null,
2217
+ "trial_params": null
2218
+ }
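The `rewards/*` columns logged above are the standard DPO training metrics. As a rough guide to how they relate, here is a minimal sketch of the usual DPO formulation (as in TRL-style trainers), not the exact LLaMA-Factory internals: the implied rewards are beta-scaled log-probability ratios of the policy against the frozen reference model, `rewards/margins` is their difference, and the loss is the mean negative log-sigmoid of the per-example margin. The `beta` default below is a placeholder, since the temperature is not recorded in this log.

```python
import torch
import torch.nn.functional as F

def dpo_stats(policy_chosen_logps, policy_rejected_logps,
              ref_chosen_logps, ref_rejected_logps, beta=0.1):
    # Implied rewards: beta-scaled log-prob ratios vs. the frozen reference.
    chosen_rewards = beta * (policy_chosen_logps - ref_chosen_logps)
    rejected_rewards = beta * (policy_rejected_logps - ref_rejected_logps)
    margins = chosen_rewards - rejected_rewards
    return {
        # "loss": mean negative log-sigmoid of the per-example reward margin.
        "loss": -F.logsigmoid(margins).mean(),
        "rewards/chosen": chosen_rewards.mean(),
        "rewards/rejected": rejected_rewards.mean(),
        "rewards/margins": margins.mean(),
        # Fraction of pairs where the chosen response out-scores the rejected.
        "rewards/accuracies": (chosen_rewards > rejected_rewards).float().mean(),
    }
```

Note that the logged margins above are batch means, so the logged loss is the mean of per-example `-logsigmoid` values rather than `-logsigmoid` of the mean margin.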
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:931683ea9d56c1ca2e54f47ee04d8b5407fe8dac1f1220d8edb2e8aef0dfa4f0
3
+ size 7544
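`training_args.bin` is the torch-serialized `transformers.TrainingArguments` object that the `Trainer` writes alongside its outputs. A minimal sketch for inspecting it; since the file is a pickle, only load it if you trust the source:

```python
import torch

# weights_only=False is required on PyTorch >= 2.6, where torch.load
# defaults to weights-only deserialization.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.gradient_accumulation_steps, args.num_train_epochs)
```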
training_loss.png ADDED
training_rewards_accuracies.png ADDED
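`training_loss.png` and `training_rewards_accuracies.png` are curves rendered from the log history above. A minimal sketch of how similar plots can be regenerated from `trainer_state.json`; the exact styling of the committed images is an assumption:

```python
import json
import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step logging entries (the final summary entry has
# "train_loss" instead of "loss" and is skipped by this filter).
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]

plt.figure()
plt.plot(steps, [e["loss"] for e in logs])
plt.xlabel("step"); plt.ylabel("loss")
plt.savefig("training_loss.png")

plt.figure()
plt.plot(steps, [e["rewards/accuracies"] for e in logs])
plt.xlabel("step"); plt.ylabel("rewards/accuracies")
plt.savefig("training_rewards_accuracies.png")
```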