davidanugraha committed
Commit 944ebdc · verified · 1 Parent(s): 162c58a

Upload folder using huggingface_hub
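The commit message points at the `huggingface_hub` uploader; a minimal sketch of the call that produces a commit like this one (the local folder path and repo id below are assumptions inferred from the model name, not stated on the page):

```python
from huggingface_hub import HfApi

api = HfApi()
# pushes every file in the local folder as a single commit; large files go through Git LFS
api.upload_folder(
    folder_path="./helpsteer3_llama32_3b_dpo_nemotron_en",  # local checkpoint dir (assumed)
    repo_id="davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_en",  # assumed repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```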
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: meta-llama/Llama-3.2-3B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: helpsteer3_llama32_3b_dpo_nemotron_en
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # helpsteer3_llama32_3b_dpo_nemotron_en
+
+ This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_nemotron_en dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-06
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: adamw_torch with betas=(0.9, 0.999) and epsilon=1e-08; no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.6.0
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
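Not part of the generated card, but a minimal loading-and-generation sketch consistent with the files in this commit (the repo id is assumed from the model name; the sampling values mirror the shipped generation_config.json):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_en"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"  # bfloat16 per config.json
)

messages = [{"role": "user", "content": "Summarize what DPO fine-tuning changes about a model."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# do_sample / temperature / top_p follow generation_config.json
outputs = model.generate(inputs, max_new_tokens=256, do_sample=True, temperature=0.6, top_p=0.9)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```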
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 159648317243392.0,
+   "train_loss": 0.5763244779045961,
+   "train_runtime": 14193.5309,
+   "train_samples_per_second": 6.548,
+   "train_steps_per_second": 0.102
+ }
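A quick derivation from the numbers above: at 0.102 steps/s over 14,193.5 s the run covers roughly 1,450 optimizer steps, i.e. about 93,000 samples at the effective batch size of 64. A small sketch that recomputes this from the file (pure arithmetic on the reported fields):

```python
import json

with open("all_results.json") as f:
    stats = json.load(f)

steps = stats["train_steps_per_second"] * stats["train_runtime"]       # ~1448 optimizer steps
samples = stats["train_samples_per_second"] * stats["train_runtime"]   # ~92.9k samples
print(f"{stats['train_runtime'] / 3600:.2f} h, ~{steps:.0f} steps, ~{samples:,.0f} samples")
```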
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+     {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+     {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+     {%- if strftime_now is defined %}
+         {%- set date_string = strftime_now("%d %b %Y") %}
+     {%- else %}
+         {%- set date_string = "26 Jul 2024" %}
+     {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+     {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+     {%- set system_message = messages[0]['content']|trim %}
+     {%- set messages = messages[1:] %}
+ {%- else %}
+     {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+     {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+     {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+     {#- Extract the first user message so we can plug it in here #}
+     {%- if messages | length != 0 %}
+         {%- set first_user_message = messages[0]['content']|trim %}
+         {%- set messages = messages[1:] %}
+     {%- else %}
+         {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+     {%- endif %}
+     {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+     {{- "Given the following functions, please respond with a JSON for a function call " }}
+     {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+     {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+     {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+         {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+     {%- elif 'tool_calls' in message %}
+         {%- if not message.tool_calls|length == 1 %}
+             {{- raise_exception("This model only supports single tool-calls at once!") }}
+         {%- endif %}
+         {%- set tool_call = message.tool_calls[0].function %}
+         {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+         {{- '{"name": "' + tool_call.name + '", ' }}
+         {{- '"parameters": ' }}
+         {{- tool_call.arguments | tojson }}
+         {{- "}" }}
+         {{- "<|eot_id|>" }}
+     {%- elif message.role == "tool" or message.role == "ipython" %}
+         {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+         {%- if message.content is mapping or message.content is iterable %}
+             {{- message.content | tojson }}
+         {%- else %}
+             {{- message.content }}
+         {%- endif %}
+         {{- "<|eot_id|>" }}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
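The template above is what `tokenizer.apply_chat_template` executes; a minimal sketch of the prompt it renders for a plain conversation (no tools), with the expected shape shown in comments (repo id assumed):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_en")
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|begin_of_text|><|start_header_id|>system<|end_header_id|>
#
# Cutting Knowledge Date: December 2023
# Today Date: <current date>
#
# You are a concise assistant.<|eot_id|><|start_header_id|>user<|end_header_id|>
#
# Hello!<|eot_id|><|start_header_id|>assistant<|end_header_id|>
```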
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.52.4",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
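A quick check of the attention geometry this config implies (grouped-query attention: 24 query heads sharing 8 KV heads):

```python
# values copied from config.json above
hidden_size, head_dim = 3072, 128
num_attention_heads, num_key_value_heads = 24, 8

assert num_attention_heads * head_dim == hidden_size      # q_proj maps 3072 -> 3072
kv_dim = num_key_value_heads * head_dim                   # k_proj / v_proj map 3072 -> 1024
group_size = num_attention_heads // num_key_value_heads   # 3 query heads per KV head
print(kv_dim, group_size)  # 1024 3
```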
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.52.4"
+ }
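These defaults travel with the checkpoint and can be inspected or overridden at call time; a small sketch (repo id assumed):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_en")
print(gen_cfg.do_sample, gen_cfg.temperature, gen_cfg.top_p)  # True 0.6 0.9
# any field can be overridden per generate() call, e.g. greedy decoding:
# model.generate(**inputs, do_sample=False)
```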
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:927047f15317348b9a1ad41282049fa342ef06ebe767a27a48ba6d539969a719
+ size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81e142d2b0767b3d7d889a700b33009e969dbac78a553630bb980fc561bb9107
+ size 1459745184
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+   "metadata": {
+     "total_size": 6425499648
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+   }
+ }
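For orientation, a small sketch of how this index is consumed: each parameter name maps to the shard file that stores it, and a loader opens only the shards it needs (note that layer 20 is split across both files, so assembling it touches both shards):

```python
import json
from collections import defaultdict

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

# group parameter names by the shard that holds them
shards = defaultdict(list)
for param_name, shard_file in index["weight_map"].items():
    shards[shard_file].append(param_name)

for shard_file in sorted(shards):
    print(shard_file, len(shards[shard_file]), "tensors")
print(index["metadata"]["total_size"] / 1e9, "GB of weights")  # ~6.43 GB
```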
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|eot_id|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|eot_id|>"
+ }
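One detail worth noting from this file: the pad token is reused as the EOS token (`<|eot_id|>`), a common choice for Llama-3-style checkpoints. A quick check (repo id assumed):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_en")
print(tok.pad_token, tok.eos_token)  # <|eot_id|> <|eot_id|>
# when batching, rely on the attention mask so pad positions are not read as real EOS tokens
```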
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
+ {
+   "added_tokens_decoder": {
+     "128000": {
+       "content": "<|begin_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128001": {
+       "content": "<|end_of_text|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128002": {
+       "content": "<|reserved_special_token_0|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128003": {
+       "content": "<|reserved_special_token_1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128004": {
+       "content": "<|finetune_right_pad_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128005": {
+       "content": "<|reserved_special_token_2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128006": {
+       "content": "<|start_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128007": {
+       "content": "<|end_header_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128008": {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128009": {
+       "content": "<|eot_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128010": {
+       "content": "<|python_tag|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128011": {
+       "content": "<|reserved_special_token_3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128012": {
+       "content": "<|reserved_special_token_4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128013": {
+       "content": "<|reserved_special_token_5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128014": {
+       "content": "<|reserved_special_token_6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128015": {
+       "content": "<|reserved_special_token_7|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128016": {
+       "content": "<|reserved_special_token_8|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128017": {
+       "content": "<|reserved_special_token_9|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128018": {
+       "content": "<|reserved_special_token_10|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128019": {
+       "content": "<|reserved_special_token_11|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128020": {
+       "content": "<|reserved_special_token_12|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128021": {
+       "content": "<|reserved_special_token_13|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128022": {
+       "content": "<|reserved_special_token_14|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128023": {
+       "content": "<|reserved_special_token_15|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128024": {
+       "content": "<|reserved_special_token_16|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128025": {
+       "content": "<|reserved_special_token_17|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128026": {
+       "content": "<|reserved_special_token_18|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128027": {
+       "content": "<|reserved_special_token_19|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128028": {
+       "content": "<|reserved_special_token_20|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128029": {
+       "content": "<|reserved_special_token_21|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128030": {
+       "content": "<|reserved_special_token_22|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128031": {
+       "content": "<|reserved_special_token_23|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128032": {
+       "content": "<|reserved_special_token_24|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128033": {
+       "content": "<|reserved_special_token_25|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128034": {
+       "content": "<|reserved_special_token_26|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128035": {
+       "content": "<|reserved_special_token_27|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128036": {
+       "content": "<|reserved_special_token_28|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128037": {
+       "content": "<|reserved_special_token_29|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128038": {
+       "content": "<|reserved_special_token_30|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128039": {
+       "content": "<|reserved_special_token_31|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128040": {
+       "content": "<|reserved_special_token_32|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128041": {
+       "content": "<|reserved_special_token_33|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128042": {
+       "content": "<|reserved_special_token_34|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128043": {
+       "content": "<|reserved_special_token_35|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128044": {
+       "content": "<|reserved_special_token_36|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128045": {
+       "content": "<|reserved_special_token_37|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128046": {
+       "content": "<|reserved_special_token_38|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128047": {
+       "content": "<|reserved_special_token_39|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128048": {
+       "content": "<|reserved_special_token_40|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128049": {
+       "content": "<|reserved_special_token_41|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128050": {
+       "content": "<|reserved_special_token_42|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128051": {
+       "content": "<|reserved_special_token_43|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128052": {
+       "content": "<|reserved_special_token_44|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128053": {
+       "content": "<|reserved_special_token_45|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128054": {
+       "content": "<|reserved_special_token_46|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128055": {
+       "content": "<|reserved_special_token_47|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128056": {
+       "content": "<|reserved_special_token_48|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128057": {
+       "content": "<|reserved_special_token_49|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128058": {
+       "content": "<|reserved_special_token_50|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128059": {
+       "content": "<|reserved_special_token_51|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128060": {
+       "content": "<|reserved_special_token_52|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128061": {
+       "content": "<|reserved_special_token_53|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128062": {
+       "content": "<|reserved_special_token_54|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128063": {
+       "content": "<|reserved_special_token_55|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128064": {
+       "content": "<|reserved_special_token_56|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128065": {
+       "content": "<|reserved_special_token_57|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128066": {
+       "content": "<|reserved_special_token_58|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128067": {
+       "content": "<|reserved_special_token_59|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128068": {
+       "content": "<|reserved_special_token_60|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128069": {
+       "content": "<|reserved_special_token_61|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128070": {
+       "content": "<|reserved_special_token_62|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128071": {
+       "content": "<|reserved_special_token_63|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128072": {
+       "content": "<|reserved_special_token_64|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128073": {
+       "content": "<|reserved_special_token_65|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128074": {
+       "content": "<|reserved_special_token_66|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128075": {
+       "content": "<|reserved_special_token_67|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128076": {
+       "content": "<|reserved_special_token_68|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128077": {
+       "content": "<|reserved_special_token_69|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128078": {
+       "content": "<|reserved_special_token_70|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128079": {
+       "content": "<|reserved_special_token_71|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128080": {
+       "content": "<|reserved_special_token_72|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128081": {
+       "content": "<|reserved_special_token_73|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128082": {
+       "content": "<|reserved_special_token_74|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128083": {
+       "content": "<|reserved_special_token_75|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128084": {
+       "content": "<|reserved_special_token_76|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128085": {
+       "content": "<|reserved_special_token_77|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128086": {
+       "content": "<|reserved_special_token_78|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128087": {
+       "content": "<|reserved_special_token_79|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128088": {
+       "content": "<|reserved_special_token_80|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128089": {
+       "content": "<|reserved_special_token_81|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128090": {
+       "content": "<|reserved_special_token_82|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128091": {
+       "content": "<|reserved_special_token_83|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128092": {
+       "content": "<|reserved_special_token_84|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128093": {
+       "content": "<|reserved_special_token_85|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128094": {
+       "content": "<|reserved_special_token_86|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128095": {
+       "content": "<|reserved_special_token_87|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128096": {
+       "content": "<|reserved_special_token_88|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128097": {
+       "content": "<|reserved_special_token_89|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128098": {
+       "content": "<|reserved_special_token_90|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128099": {
+       "content": "<|reserved_special_token_91|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128100": {
+       "content": "<|reserved_special_token_92|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128101": {
+       "content": "<|reserved_special_token_93|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128102": {
+       "content": "<|reserved_special_token_94|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128103": {
+       "content": "<|reserved_special_token_95|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128104": {
+       "content": "<|reserved_special_token_96|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128105": {
+       "content": "<|reserved_special_token_97|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128106": {
+       "content": "<|reserved_special_token_98|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128107": {
+       "content": "<|reserved_special_token_99|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128108": {
+       "content": "<|reserved_special_token_100|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128109": {
+       "content": "<|reserved_special_token_101|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128110": {
+       "content": "<|reserved_special_token_102|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128111": {
+       "content": "<|reserved_special_token_103|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128112": {
+       "content": "<|reserved_special_token_104|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128113": {
+       "content": "<|reserved_special_token_105|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128114": {
+       "content": "<|reserved_special_token_106|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128115": {
+       "content": "<|reserved_special_token_107|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128116": {
+       "content": "<|reserved_special_token_108|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128117": {
+       "content": "<|reserved_special_token_109|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128118": {
+       "content": "<|reserved_special_token_110|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128119": {
+       "content": "<|reserved_special_token_111|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128120": {
+       "content": "<|reserved_special_token_112|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128121": {
+       "content": "<|reserved_special_token_113|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128122": {
+       "content": "<|reserved_special_token_114|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128123": {
+       "content": "<|reserved_special_token_115|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128124": {
+       "content": "<|reserved_special_token_116|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 1.0,
+ "total_flos": 159648317243392.0,
+ "train_loss": 0.5763244779045961,
+ "train_runtime": 14193.5309,
+ "train_samples_per_second": 6.548,
+ "train_steps_per_second": 0.102
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,146 @@
+ {"current_steps": 10, "total_steps": 1453, "loss": 0.694, "accuracy": 0.3843749761581421, "lr": 6.164383561643836e-08, "epoch": 0.006885866758478224, "percentage": 0.69, "elapsed_time": "0:01:43", "remaining_time": "4:08:12"}
+ {"current_steps": 20, "total_steps": 1453, "loss": 0.6943, "accuracy": 0.5015624761581421, "lr": 1.3013698630136985e-07, "epoch": 0.013771733516956448, "percentage": 1.38, "elapsed_time": "0:03:20", "remaining_time": "3:59:19"}
+ {"current_steps": 30, "total_steps": 1453, "loss": 0.6931, "accuracy": 0.4937500059604645, "lr": 1.9863013698630135e-07, "epoch": 0.02065760027543467, "percentage": 2.06, "elapsed_time": "0:05:00", "remaining_time": "3:57:47"}
+ {"current_steps": 40, "total_steps": 1453, "loss": 0.6934, "accuracy": 0.53125, "lr": 2.671232876712329e-07, "epoch": 0.027543467033912895, "percentage": 2.75, "elapsed_time": "0:06:37", "remaining_time": "3:54:07"}
+ {"current_steps": 50, "total_steps": 1453, "loss": 0.6927, "accuracy": 0.5265625715255737, "lr": 3.3561643835616436e-07, "epoch": 0.03442933379239112, "percentage": 3.44, "elapsed_time": "0:08:14", "remaining_time": "3:51:22"}
+ {"current_steps": 60, "total_steps": 1453, "loss": 0.6917, "accuracy": 0.5062500238418579, "lr": 4.041095890410959e-07, "epoch": 0.04131520055086934, "percentage": 4.13, "elapsed_time": "0:09:53", "remaining_time": "3:49:32"}
+ {"current_steps": 70, "total_steps": 1453, "loss": 0.6929, "accuracy": 0.503125011920929, "lr": 4.726027397260274e-07, "epoch": 0.04820106730934756, "percentage": 4.82, "elapsed_time": "0:11:33", "remaining_time": "3:48:28"}
+ {"current_steps": 80, "total_steps": 1453, "loss": 0.6908, "accuracy": 0.5437500476837158, "lr": 5.410958904109589e-07, "epoch": 0.05508693406782579, "percentage": 5.51, "elapsed_time": "0:13:12", "remaining_time": "3:46:43"}
+ {"current_steps": 90, "total_steps": 1453, "loss": 0.6894, "accuracy": 0.5625, "lr": 6.095890410958904e-07, "epoch": 0.06197280082630401, "percentage": 6.19, "elapsed_time": "0:14:50", "remaining_time": "3:44:44"}
+ {"current_steps": 100, "total_steps": 1453, "loss": 0.6901, "accuracy": 0.5531250238418579, "lr": 6.78082191780822e-07, "epoch": 0.06885866758478223, "percentage": 6.88, "elapsed_time": "0:16:29", "remaining_time": "3:43:05"}
+ {"current_steps": 110, "total_steps": 1453, "loss": 0.6856, "accuracy": 0.6171875, "lr": 7.465753424657533e-07, "epoch": 0.07574453434326046, "percentage": 7.57, "elapsed_time": "0:18:06", "remaining_time": "3:41:10"}
+ {"current_steps": 120, "total_steps": 1453, "loss": 0.6837, "accuracy": 0.5953124761581421, "lr": 8.150684931506849e-07, "epoch": 0.08263040110173868, "percentage": 8.26, "elapsed_time": "0:19:45", "remaining_time": "3:39:28"}
+ {"current_steps": 130, "total_steps": 1453, "loss": 0.682, "accuracy": 0.6031250357627869, "lr": 8.835616438356164e-07, "epoch": 0.08951626786021691, "percentage": 8.95, "elapsed_time": "0:21:24", "remaining_time": "3:37:47"}
+ {"current_steps": 140, "total_steps": 1453, "loss": 0.6789, "accuracy": 0.6000000238418579, "lr": 9.520547945205479e-07, "epoch": 0.09640213461869512, "percentage": 9.64, "elapsed_time": "0:23:02", "remaining_time": "3:36:07"}
+ {"current_steps": 150, "total_steps": 1453, "loss": 0.6775, "accuracy": 0.5906250476837158, "lr": 9.977046671767407e-07, "epoch": 0.10328800137717335, "percentage": 10.32, "elapsed_time": "0:24:41", "remaining_time": "3:34:33"}
+ {"current_steps": 160, "total_steps": 1453, "loss": 0.6647, "accuracy": 0.6546875238418579, "lr": 9.90053557765876e-07, "epoch": 0.11017386813565158, "percentage": 11.01, "elapsed_time": "0:26:20", "remaining_time": "3:32:52"}
+ {"current_steps": 170, "total_steps": 1453, "loss": 0.6698, "accuracy": 0.604687511920929, "lr": 9.824024483550113e-07, "epoch": 0.1170597348941298, "percentage": 11.7, "elapsed_time": "0:27:59", "remaining_time": "3:31:18"}
+ {"current_steps": 180, "total_steps": 1453, "loss": 0.6713, "accuracy": 0.612500011920929, "lr": 9.747513389441468e-07, "epoch": 0.12394560165260803, "percentage": 12.39, "elapsed_time": "0:29:38", "remaining_time": "3:29:40"}
+ {"current_steps": 190, "total_steps": 1453, "loss": 0.6543, "accuracy": 0.6296875476837158, "lr": 9.671002295332823e-07, "epoch": 0.13083146841108625, "percentage": 13.08, "elapsed_time": "0:31:16", "remaining_time": "3:27:55"}
+ {"current_steps": 200, "total_steps": 1453, "loss": 0.6718, "accuracy": 0.609375, "lr": 9.594491201224178e-07, "epoch": 0.13771733516956447, "percentage": 13.76, "elapsed_time": "0:32:55", "remaining_time": "3:26:14"}
+ {"current_steps": 210, "total_steps": 1453, "loss": 0.657, "accuracy": 0.6312500238418579, "lr": 9.517980107115531e-07, "epoch": 0.14460320192804269, "percentage": 14.45, "elapsed_time": "0:34:34", "remaining_time": "3:24:41"}
+ {"current_steps": 220, "total_steps": 1453, "loss": 0.6495, "accuracy": 0.6375000476837158, "lr": 9.441469013006885e-07, "epoch": 0.15148906868652093, "percentage": 15.14, "elapsed_time": "0:36:12", "remaining_time": "3:22:57"}
+ {"current_steps": 230, "total_steps": 1453, "loss": 0.6483, "accuracy": 0.6421874761581421, "lr": 9.364957918898239e-07, "epoch": 0.15837493544499914, "percentage": 15.83, "elapsed_time": "0:37:52", "remaining_time": "3:21:21"}
+ {"current_steps": 240, "total_steps": 1453, "loss": 0.641, "accuracy": 0.643750011920929, "lr": 9.288446824789594e-07, "epoch": 0.16526080220347736, "percentage": 16.52, "elapsed_time": "0:39:29", "remaining_time": "3:19:38"}
+ {"current_steps": 250, "total_steps": 1453, "loss": 0.6497, "accuracy": 0.6328125596046448, "lr": 9.211935730680948e-07, "epoch": 0.17214666896195557, "percentage": 17.21, "elapsed_time": "0:41:08", "remaining_time": "3:18:00"}
+ {"current_steps": 260, "total_steps": 1453, "loss": 0.6466, "accuracy": 0.6296875476837158, "lr": 9.135424636572303e-07, "epoch": 0.17903253572043382, "percentage": 17.89, "elapsed_time": "0:42:47", "remaining_time": "3:16:21"}
+ {"current_steps": 270, "total_steps": 1453, "loss": 0.6325, "accuracy": 0.6312500238418579, "lr": 9.058913542463656e-07, "epoch": 0.18591840247891203, "percentage": 18.58, "elapsed_time": "0:44:26", "remaining_time": "3:14:41"}
+ {"current_steps": 280, "total_steps": 1453, "loss": 0.6299, "accuracy": 0.6625000238418579, "lr": 8.982402448355011e-07, "epoch": 0.19280426923739025, "percentage": 19.27, "elapsed_time": "0:46:03", "remaining_time": "3:12:57"}
+ {"current_steps": 290, "total_steps": 1453, "loss": 0.6268, "accuracy": 0.6671875715255737, "lr": 8.905891354246365e-07, "epoch": 0.1996901359958685, "percentage": 19.96, "elapsed_time": "0:47:41", "remaining_time": "3:11:14"}
+ {"current_steps": 300, "total_steps": 1453, "loss": 0.6151, "accuracy": 0.7046874761581421, "lr": 8.829380260137719e-07, "epoch": 0.2065760027543467, "percentage": 20.65, "elapsed_time": "0:49:20", "remaining_time": "3:09:38"}
+ {"current_steps": 310, "total_steps": 1453, "loss": 0.6302, "accuracy": 0.6578125357627869, "lr": 8.752869166029074e-07, "epoch": 0.21346186951282492, "percentage": 21.34, "elapsed_time": "0:51:00", "remaining_time": "3:08:04"}
+ {"current_steps": 320, "total_steps": 1453, "loss": 0.6266, "accuracy": 0.6656249761581421, "lr": 8.676358071920427e-07, "epoch": 0.22034773627130316, "percentage": 22.02, "elapsed_time": "0:52:36", "remaining_time": "3:06:17"}
+ {"current_steps": 330, "total_steps": 1453, "loss": 0.612, "accuracy": 0.6796875, "lr": 8.599846977811782e-07, "epoch": 0.22723360302978138, "percentage": 22.71, "elapsed_time": "0:54:13", "remaining_time": "3:04:32"}
+ {"current_steps": 340, "total_steps": 1453, "loss": 0.6088, "accuracy": 0.6796875596046448, "lr": 8.523335883703136e-07, "epoch": 0.2341194697882596, "percentage": 23.4, "elapsed_time": "0:55:52", "remaining_time": "3:02:56"}
+ {"current_steps": 350, "total_steps": 1453, "loss": 0.6174, "accuracy": 0.667187511920929, "lr": 8.446824789594492e-07, "epoch": 0.2410053365467378, "percentage": 24.09, "elapsed_time": "0:57:29", "remaining_time": "3:01:11"}
+ {"current_steps": 360, "total_steps": 1453, "loss": 0.6075, "accuracy": 0.6765625476837158, "lr": 8.370313695485846e-07, "epoch": 0.24789120330521605, "percentage": 24.78, "elapsed_time": "0:59:07", "remaining_time": "2:59:30"}
+ {"current_steps": 370, "total_steps": 1453, "loss": 0.6099, "accuracy": 0.6812500357627869, "lr": 8.293802601377199e-07, "epoch": 0.25477707006369427, "percentage": 25.46, "elapsed_time": "1:00:45", "remaining_time": "2:57:49"}
+ {"current_steps": 380, "total_steps": 1453, "loss": 0.612, "accuracy": 0.659375011920929, "lr": 8.217291507268554e-07, "epoch": 0.2616629368221725, "percentage": 26.15, "elapsed_time": "1:02:23", "remaining_time": "2:56:10"}
+ {"current_steps": 390, "total_steps": 1453, "loss": 0.6082, "accuracy": 0.6796875, "lr": 8.140780413159908e-07, "epoch": 0.2685488035806507, "percentage": 26.84, "elapsed_time": "1:03:59", "remaining_time": "2:54:25"}
+ {"current_steps": 400, "total_steps": 1453, "loss": 0.6055, "accuracy": 0.668749988079071, "lr": 8.064269319051263e-07, "epoch": 0.27543467033912894, "percentage": 27.53, "elapsed_time": "1:05:39", "remaining_time": "2:52:50"}
+ {"current_steps": 410, "total_steps": 1453, "loss": 0.5995, "accuracy": 0.6812499761581421, "lr": 7.987758224942617e-07, "epoch": 0.2823205370976072, "percentage": 28.22, "elapsed_time": "1:07:19", "remaining_time": "2:51:15"}
+ {"current_steps": 420, "total_steps": 1453, "loss": 0.6077, "accuracy": 0.6578124761581421, "lr": 7.91124713083397e-07, "epoch": 0.28920640385608537, "percentage": 28.91, "elapsed_time": "1:08:57", "remaining_time": "2:49:36"}
+ {"current_steps": 430, "total_steps": 1453, "loss": 0.5838, "accuracy": 0.7093750238418579, "lr": 7.834736036725325e-07, "epoch": 0.2960922706145636, "percentage": 29.59, "elapsed_time": "1:10:36", "remaining_time": "2:47:58"}
+ {"current_steps": 440, "total_steps": 1453, "loss": 0.5943, "accuracy": 0.7171875238418579, "lr": 7.758224942616679e-07, "epoch": 0.30297813737304186, "percentage": 30.28, "elapsed_time": "1:12:15", "remaining_time": "2:46:22"}
+ {"current_steps": 450, "total_steps": 1453, "loss": 0.6007, "accuracy": 0.6968750357627869, "lr": 7.681713848508034e-07, "epoch": 0.30986400413152004, "percentage": 30.97, "elapsed_time": "1:13:53", "remaining_time": "2:44:42"}
+ {"current_steps": 460, "total_steps": 1453, "loss": 0.5764, "accuracy": 0.6859375238418579, "lr": 7.605202754399388e-07, "epoch": 0.3167498708899983, "percentage": 31.66, "elapsed_time": "1:15:32", "remaining_time": "2:43:03"}
+ {"current_steps": 470, "total_steps": 1453, "loss": 0.5815, "accuracy": 0.7015624642372131, "lr": 7.528691660290742e-07, "epoch": 0.32363573764847653, "percentage": 32.35, "elapsed_time": "1:17:10", "remaining_time": "2:41:24"}
+ {"current_steps": 480, "total_steps": 1453, "loss": 0.601, "accuracy": 0.671875, "lr": 7.452180566182096e-07, "epoch": 0.3305216044069547, "percentage": 33.04, "elapsed_time": "1:18:50", "remaining_time": "2:39:48"}
+ {"current_steps": 490, "total_steps": 1453, "loss": 0.5945, "accuracy": 0.6687500476837158, "lr": 7.37566947207345e-07, "epoch": 0.33740747116543296, "percentage": 33.72, "elapsed_time": "1:20:27", "remaining_time": "2:38:07"}
+ {"current_steps": 500, "total_steps": 1453, "loss": 0.5829, "accuracy": 0.699999988079071, "lr": 7.299158377964805e-07, "epoch": 0.34429333792391115, "percentage": 34.41, "elapsed_time": "1:22:06", "remaining_time": "2:36:29"}
+ {"current_steps": 510, "total_steps": 1453, "loss": 0.5725, "accuracy": 0.7312500476837158, "lr": 7.222647283856159e-07, "epoch": 0.3511792046823894, "percentage": 35.1, "elapsed_time": "1:24:07", "remaining_time": "2:35:32"}
+ {"current_steps": 520, "total_steps": 1453, "loss": 0.577, "accuracy": 0.703125, "lr": 7.146136189747513e-07, "epoch": 0.35806507144086763, "percentage": 35.79, "elapsed_time": "1:25:43", "remaining_time": "2:33:47"}
+ {"current_steps": 530, "total_steps": 1453, "loss": 0.5766, "accuracy": 0.707812488079071, "lr": 7.069625095638867e-07, "epoch": 0.3649509381993458, "percentage": 36.48, "elapsed_time": "1:27:21", "remaining_time": "2:32:07"}
+ {"current_steps": 540, "total_steps": 1453, "loss": 0.5615, "accuracy": 0.7171875238418579, "lr": 6.993114001530222e-07, "epoch": 0.37183680495782406, "percentage": 37.16, "elapsed_time": "1:28:57", "remaining_time": "2:30:23"}
+ {"current_steps": 550, "total_steps": 1453, "loss": 0.5994, "accuracy": 0.6890625357627869, "lr": 6.916602907421576e-07, "epoch": 0.3787226717163023, "percentage": 37.85, "elapsed_time": "1:30:34", "remaining_time": "2:28:42"}
+ {"current_steps": 560, "total_steps": 1453, "loss": 0.6023, "accuracy": 0.6656250357627869, "lr": 6.84009181331293e-07, "epoch": 0.3856085384747805, "percentage": 38.54, "elapsed_time": "1:32:09", "remaining_time": "2:26:57"}
+ {"current_steps": 570, "total_steps": 1453, "loss": 0.5773, "accuracy": 0.6812500357627869, "lr": 6.763580719204285e-07, "epoch": 0.39249440523325874, "percentage": 39.23, "elapsed_time": "1:33:47", "remaining_time": "2:25:18"}
+ {"current_steps": 580, "total_steps": 1453, "loss": 0.5876, "accuracy": 0.7093750238418579, "lr": 6.687069625095638e-07, "epoch": 0.399380271991737, "percentage": 39.92, "elapsed_time": "1:35:24", "remaining_time": "2:23:36"}
+ {"current_steps": 590, "total_steps": 1453, "loss": 0.5925, "accuracy": 0.6734374761581421, "lr": 6.610558530986993e-07, "epoch": 0.40626613875021517, "percentage": 40.61, "elapsed_time": "1:37:00", "remaining_time": "2:21:53"}
+ {"current_steps": 600, "total_steps": 1453, "loss": 0.5923, "accuracy": 0.6875, "lr": 6.534047436878347e-07, "epoch": 0.4131520055086934, "percentage": 41.29, "elapsed_time": "1:38:37", "remaining_time": "2:20:12"}
+ {"current_steps": 610, "total_steps": 1453, "loss": 0.6058, "accuracy": 0.6890625357627869, "lr": 6.457536342769701e-07, "epoch": 0.42003787226717165, "percentage": 41.98, "elapsed_time": "1:40:12", "remaining_time": "2:18:29"}
+ {"current_steps": 620, "total_steps": 1453, "loss": 0.5515, "accuracy": 0.7312500476837158, "lr": 6.381025248661056e-07, "epoch": 0.42692373902564984, "percentage": 42.67, "elapsed_time": "1:41:47", "remaining_time": "2:16:46"}
+ {"current_steps": 630, "total_steps": 1453, "loss": 0.5643, "accuracy": 0.723437488079071, "lr": 6.304514154552409e-07, "epoch": 0.4338096057841281, "percentage": 43.36, "elapsed_time": "1:43:25", "remaining_time": "2:15:05"}
+ {"current_steps": 640, "total_steps": 1453, "loss": 0.5342, "accuracy": 0.7531250715255737, "lr": 6.228003060443764e-07, "epoch": 0.4406954725426063, "percentage": 44.05, "elapsed_time": "1:44:59", "remaining_time": "2:13:21"}
+ {"current_steps": 650, "total_steps": 1453, "loss": 0.5964, "accuracy": 0.6953125, "lr": 6.151491966335118e-07, "epoch": 0.4475813393010845, "percentage": 44.74, "elapsed_time": "1:46:35", "remaining_time": "2:11:41"}
+ {"current_steps": 660, "total_steps": 1453, "loss": 0.593, "accuracy": 0.6875, "lr": 6.074980872226473e-07, "epoch": 0.45446720605956276, "percentage": 45.42, "elapsed_time": "1:48:11", "remaining_time": "2:10:00"}
+ {"current_steps": 670, "total_steps": 1453, "loss": 0.5572, "accuracy": 0.7109375, "lr": 5.998469778117827e-07, "epoch": 0.46135307281804094, "percentage": 46.11, "elapsed_time": "1:49:47", "remaining_time": "2:08:18"}
+ {"current_steps": 680, "total_steps": 1453, "loss": 0.574, "accuracy": 0.7000000476837158, "lr": 5.92195868400918e-07, "epoch": 0.4682389395765192, "percentage": 46.8, "elapsed_time": "1:51:24", "remaining_time": "2:06:39"}
+ {"current_steps": 690, "total_steps": 1453, "loss": 0.5606, "accuracy": 0.7375000715255737, "lr": 5.845447589900535e-07, "epoch": 0.47512480633499743, "percentage": 47.49, "elapsed_time": "1:53:01", "remaining_time": "2:04:58"}
+ {"current_steps": 700, "total_steps": 1453, "loss": 0.5816, "accuracy": 0.6968749761581421, "lr": 5.768936495791889e-07, "epoch": 0.4820106730934756, "percentage": 48.18, "elapsed_time": "1:54:38", "remaining_time": "2:03:19"}
+ {"current_steps": 710, "total_steps": 1453, "loss": 0.5411, "accuracy": 0.7359374761581421, "lr": 5.692425401683244e-07, "epoch": 0.48889653985195386, "percentage": 48.86, "elapsed_time": "1:56:14", "remaining_time": "2:01:39"}
+ {"current_steps": 720, "total_steps": 1453, "loss": 0.5499, "accuracy": 0.731249988079071, "lr": 5.615914307574598e-07, "epoch": 0.4957824066104321, "percentage": 49.55, "elapsed_time": "1:57:50", "remaining_time": "1:59:58"}
+ {"current_steps": 730, "total_steps": 1453, "loss": 0.5664, "accuracy": 0.7312500476837158, "lr": 5.539403213465952e-07, "epoch": 0.5026682733689103, "percentage": 50.24, "elapsed_time": "1:59:27", "remaining_time": "1:58:19"}
+ {"current_steps": 740, "total_steps": 1453, "loss": 0.6024, "accuracy": 0.6968750357627869, "lr": 5.462892119357306e-07, "epoch": 0.5095541401273885, "percentage": 50.93, "elapsed_time": "2:01:05", "remaining_time": "1:56:39"}
+ {"current_steps": 750, "total_steps": 1453, "loss": 0.564, "accuracy": 0.71875, "lr": 5.38638102524866e-07, "epoch": 0.5164400068858668, "percentage": 51.62, "elapsed_time": "2:02:41", "remaining_time": "1:55:00"}
+ {"current_steps": 760, "total_steps": 1453, "loss": 0.5744, "accuracy": 0.7109375, "lr": 5.309869931140015e-07, "epoch": 0.523325873644345, "percentage": 52.31, "elapsed_time": "2:04:17", "remaining_time": "1:53:19"}
+ {"current_steps": 770, "total_steps": 1453, "loss": 0.5347, "accuracy": 0.7093750238418579, "lr": 5.233358837031369e-07, "epoch": 0.5302117404028232, "percentage": 52.99, "elapsed_time": "2:05:55", "remaining_time": "1:51:41"}
+ {"current_steps": 780, "total_steps": 1453, "loss": 0.5347, "accuracy": 0.7312500476837158, "lr": 5.156847742922723e-07, "epoch": 0.5370976071613014, "percentage": 53.68, "elapsed_time": "2:07:32", "remaining_time": "1:50:02"}
+ {"current_steps": 790, "total_steps": 1453, "loss": 0.5585, "accuracy": 0.7203124761581421, "lr": 5.080336648814077e-07, "epoch": 0.5439834739197796, "percentage": 54.37, "elapsed_time": "2:09:07", "remaining_time": "1:48:21"}
+ {"current_steps": 800, "total_steps": 1453, "loss": 0.5629, "accuracy": 0.7328124642372131, "lr": 5.003825554705431e-07, "epoch": 0.5508693406782579, "percentage": 55.06, "elapsed_time": "2:10:43", "remaining_time": "1:46:42"}
+ {"current_steps": 810, "total_steps": 1453, "loss": 0.5331, "accuracy": 0.7515624761581421, "lr": 4.927314460596787e-07, "epoch": 0.5577552074367361, "percentage": 55.75, "elapsed_time": "2:12:19", "remaining_time": "1:45:02"}
+ {"current_steps": 820, "total_steps": 1453, "loss": 0.5524, "accuracy": 0.7250000238418579, "lr": 4.850803366488141e-07, "epoch": 0.5646410741952144, "percentage": 56.43, "elapsed_time": "2:13:56", "remaining_time": "1:43:24"}
+ {"current_steps": 830, "total_steps": 1453, "loss": 0.5671, "accuracy": 0.7281249761581421, "lr": 4.774292272379495e-07, "epoch": 0.5715269409536925, "percentage": 57.12, "elapsed_time": "2:15:34", "remaining_time": "1:41:45"}
+ {"current_steps": 840, "total_steps": 1453, "loss": 0.5342, "accuracy": 0.7359375357627869, "lr": 4.697781178270849e-07, "epoch": 0.5784128077121707, "percentage": 57.81, "elapsed_time": "2:17:10", "remaining_time": "1:40:06"}
+ {"current_steps": 850, "total_steps": 1453, "loss": 0.5385, "accuracy": 0.753125011920929, "lr": 4.621270084162203e-07, "epoch": 0.585298674470649, "percentage": 58.5, "elapsed_time": "2:18:48", "remaining_time": "1:38:27"}
+ {"current_steps": 860, "total_steps": 1453, "loss": 0.564, "accuracy": 0.7109375, "lr": 4.5447589900535577e-07, "epoch": 0.5921845412291272, "percentage": 59.19, "elapsed_time": "2:20:23", "remaining_time": "1:36:48"}
+ {"current_steps": 870, "total_steps": 1453, "loss": 0.5551, "accuracy": 0.7406250238418579, "lr": 4.4682478959449117e-07, "epoch": 0.5990704079876055, "percentage": 59.88, "elapsed_time": "2:22:00", "remaining_time": "1:35:09"}
+ {"current_steps": 880, "total_steps": 1453, "loss": 0.5571, "accuracy": 0.7046875357627869, "lr": 4.391736801836266e-07, "epoch": 0.6059562747460837, "percentage": 60.56, "elapsed_time": "2:23:36", "remaining_time": "1:33:30"}
+ {"current_steps": 890, "total_steps": 1453, "loss": 0.5519, "accuracy": 0.75, "lr": 4.315225707727621e-07, "epoch": 0.6128421415045618, "percentage": 61.25, "elapsed_time": "2:25:15", "remaining_time": "1:31:53"}
+ {"current_steps": 900, "total_steps": 1453, "loss": 0.5094, "accuracy": 0.7406250238418579, "lr": 4.238714613618974e-07, "epoch": 0.6197280082630401, "percentage": 61.94, "elapsed_time": "2:26:50", "remaining_time": "1:30:13"}
+ {"current_steps": 910, "total_steps": 1453, "loss": 0.5408, "accuracy": 0.7265625, "lr": 4.162203519510329e-07, "epoch": 0.6266138750215183, "percentage": 62.63, "elapsed_time": "2:28:26", "remaining_time": "1:28:34"}
+ {"current_steps": 920, "total_steps": 1453, "loss": 0.5653, "accuracy": 0.7312500476837158, "lr": 4.085692425401683e-07, "epoch": 0.6334997417799966, "percentage": 63.32, "elapsed_time": "2:30:04", "remaining_time": "1:26:56"}
+ {"current_steps": 930, "total_steps": 1453, "loss": 0.5285, "accuracy": 0.729687511920929, "lr": 4.0091813312930373e-07, "epoch": 0.6403856085384748, "percentage": 64.01, "elapsed_time": "2:31:41", "remaining_time": "1:25:18"}
+ {"current_steps": 940, "total_steps": 1453, "loss": 0.5237, "accuracy": 0.7437500357627869, "lr": 3.932670237184392e-07, "epoch": 0.6472714752969531, "percentage": 64.69, "elapsed_time": "2:33:19", "remaining_time": "1:23:40"}
+ {"current_steps": 950, "total_steps": 1453, "loss": 0.5382, "accuracy": 0.745312511920929, "lr": 3.856159143075746e-07, "epoch": 0.6541573420554312, "percentage": 65.38, "elapsed_time": "2:34:55", "remaining_time": "1:22:01"}
+ {"current_steps": 960, "total_steps": 1453, "loss": 0.5277, "accuracy": 0.7390625476837158, "lr": 3.7796480489671e-07, "epoch": 0.6610432088139094, "percentage": 66.07, "elapsed_time": "2:36:31", "remaining_time": "1:20:22"}
+ {"current_steps": 970, "total_steps": 1453, "loss": 0.5169, "accuracy": 0.7546875476837158, "lr": 3.7031369548584544e-07, "epoch": 0.6679290755723877, "percentage": 66.76, "elapsed_time": "2:38:07", "remaining_time": "1:18:44"}
+ {"current_steps": 980, "total_steps": 1453, "loss": 0.5366, "accuracy": 0.737500011920929, "lr": 3.6266258607498084e-07, "epoch": 0.6748149423308659, "percentage": 67.45, "elapsed_time": "2:39:44", "remaining_time": "1:17:06"}
+ {"current_steps": 990, "total_steps": 1453, "loss": 0.5549, "accuracy": 0.7437500357627869, "lr": 3.550114766641163e-07, "epoch": 0.6817008090893442, "percentage": 68.13, "elapsed_time": "2:41:21", "remaining_time": "1:15:27"}
+ {"current_steps": 1000, "total_steps": 1453, "loss": 0.5436, "accuracy": 0.721875011920929, "lr": 3.473603672532517e-07, "epoch": 0.6885866758478223, "percentage": 68.82, "elapsed_time": "2:42:56", "remaining_time": "1:13:48"}
+ {"current_steps": 1010, "total_steps": 1453, "loss": 0.5122, "accuracy": 0.7468750476837158, "lr": 3.3970925784238715e-07, "epoch": 0.6954725426063005, "percentage": 69.51, "elapsed_time": "2:44:54", "remaining_time": "1:12:19"}
+ {"current_steps": 1020, "total_steps": 1453, "loss": 0.5261, "accuracy": 0.7406250238418579, "lr": 3.3205814843152255e-07, "epoch": 0.7023584093647788, "percentage": 70.2, "elapsed_time": "2:46:30", "remaining_time": "1:10:41"}
+ {"current_steps": 1030, "total_steps": 1453, "loss": 0.5143, "accuracy": 0.7562499642372131, "lr": 3.2440703902065795e-07, "epoch": 0.709244276123257, "percentage": 70.89, "elapsed_time": "2:48:07", "remaining_time": "1:09:02"}
+ {"current_steps": 1040, "total_steps": 1453, "loss": 0.5347, "accuracy": 0.7484375238418579, "lr": 3.167559296097934e-07, "epoch": 0.7161301428817353, "percentage": 71.58, "elapsed_time": "2:49:44", "remaining_time": "1:07:24"}
+ {"current_steps": 1050, "total_steps": 1453, "loss": 0.5281, "accuracy": 0.7437499761581421, "lr": 3.091048201989288e-07, "epoch": 0.7230160096402135, "percentage": 72.26, "elapsed_time": "2:51:21", "remaining_time": "1:05:46"}
+ {"current_steps": 1060, "total_steps": 1453, "loss": 0.5556, "accuracy": 0.7328125238418579, "lr": 3.0145371078806426e-07, "epoch": 0.7299018763986916, "percentage": 72.95, "elapsed_time": "2:52:59", "remaining_time": "1:04:08"}
+ {"current_steps": 1070, "total_steps": 1453, "loss": 0.518, "accuracy": 0.7437500357627869, "lr": 2.938026013771997e-07, "epoch": 0.7367877431571699, "percentage": 73.64, "elapsed_time": "2:54:35", "remaining_time": "1:02:29"}
+ {"current_steps": 1080, "total_steps": 1453, "loss": 0.4957, "accuracy": 0.770312488079071, "lr": 2.861514919663351e-07, "epoch": 0.7436736099156481, "percentage": 74.33, "elapsed_time": "2:56:12", "remaining_time": "1:00:51"}
+ {"current_steps": 1090, "total_steps": 1453, "loss": 0.5508, "accuracy": 0.7328125238418579, "lr": 2.785003825554705e-07, "epoch": 0.7505594766741264, "percentage": 75.02, "elapsed_time": "2:57:47", "remaining_time": "0:59:12"}
+ {"current_steps": 1100, "total_steps": 1453, "loss": 0.5145, "accuracy": 0.753125011920929, "lr": 2.7084927314460597e-07, "epoch": 0.7574453434326046, "percentage": 75.71, "elapsed_time": "2:59:25", "remaining_time": "0:57:34"}
+ {"current_steps": 1110, "total_steps": 1453, "loss": 0.5493, "accuracy": 0.7265625596046448, "lr": 2.6319816373374137e-07, "epoch": 0.7643312101910829, "percentage": 76.39, "elapsed_time": "3:01:01", "remaining_time": "0:55:56"}
+ {"current_steps": 1120, "total_steps": 1453, "loss": 0.5556, "accuracy": 0.715624988079071, "lr": 2.555470543228768e-07, "epoch": 0.771217076949561, "percentage": 77.08, "elapsed_time": "3:02:37", "remaining_time": "0:54:17"}
+ {"current_steps": 1130, "total_steps": 1453, "loss": 0.5247, "accuracy": 0.753125011920929, "lr": 2.4789594491201223e-07, "epoch": 0.7781029437080392, "percentage": 77.77, "elapsed_time": "3:04:14", "remaining_time": "0:52:39"}
+ {"current_steps": 1140, "total_steps": 1453, "loss": 0.5388, "accuracy": 0.7437500357627869, "lr": 2.402448355011477e-07, "epoch": 0.7849888104665175, "percentage": 78.46, "elapsed_time": "3:05:51", "remaining_time": "0:51:01"}
+ {"current_steps": 1150, "total_steps": 1453, "loss": 0.5362, "accuracy": 0.7515624761581421, "lr": 2.3259372609028308e-07, "epoch": 0.7918746772249957, "percentage": 79.15, "elapsed_time": "3:07:29", "remaining_time": "0:49:24"}
+ {"current_steps": 1160, "total_steps": 1453, "loss": 0.5209, "accuracy": 0.7640625238418579, "lr": 2.2494261667941848e-07, "epoch": 0.798760543983474, "percentage": 79.83, "elapsed_time": "3:09:06", "remaining_time": "0:47:45"}
+ {"current_steps": 1170, "total_steps": 1453, "loss": 0.5456, "accuracy": 0.7406250238418579, "lr": 2.1729150726855394e-07, "epoch": 0.8056464107419521, "percentage": 80.52, "elapsed_time": "3:10:41", "remaining_time": "0:46:07"}
+ {"current_steps": 1180, "total_steps": 1453, "loss": 0.5012, "accuracy": 0.7406250238418579, "lr": 2.0964039785768936e-07, "epoch": 0.8125322775004303, "percentage": 81.21, "elapsed_time": "3:12:18", "remaining_time": "0:44:29"}
+ {"current_steps": 1190, "total_steps": 1453, "loss": 0.5174, "accuracy": 0.7609375715255737, "lr": 2.019892884468248e-07, "epoch": 0.8194181442589086, "percentage": 81.9, "elapsed_time": "3:13:55", "remaining_time": "0:42:51"}
+ {"current_steps": 1200, "total_steps": 1453, "loss": 0.5403, "accuracy": 0.714062511920929, "lr": 1.943381790359602e-07, "epoch": 0.8263040110173868, "percentage": 82.59, "elapsed_time": "3:15:33", "remaining_time": "0:41:13"}
+ {"current_steps": 1210, "total_steps": 1453, "loss": 0.5334, "accuracy": 0.7484375238418579, "lr": 1.8668706962509562e-07, "epoch": 0.8331898777758651, "percentage": 83.28, "elapsed_time": "3:17:08", "remaining_time": "0:39:35"}
+ {"current_steps": 1220, "total_steps": 1453, "loss": 0.5213, "accuracy": 0.7359374761581421, "lr": 1.7903596021423107e-07, "epoch": 0.8400757445343433, "percentage": 83.96, "elapsed_time": "3:18:45", "remaining_time": "0:37:57"}
+ {"current_steps": 1230, "total_steps": 1453, "loss": 0.493, "accuracy": 0.7718750238418579, "lr": 1.7138485080336647e-07, "epoch": 0.8469616112928214, "percentage": 84.65, "elapsed_time": "3:20:23", "remaining_time": "0:36:19"}
+ {"current_steps": 1240, "total_steps": 1453, "loss": 0.4972, "accuracy": 0.7671875357627869, "lr": 1.637337413925019e-07, "epoch": 0.8538474780512997, "percentage": 85.34, "elapsed_time": "3:21:59", "remaining_time": "0:34:41"}
+ {"current_steps": 1250, "total_steps": 1453, "loss": 0.5792, "accuracy": 0.7156250476837158, "lr": 1.5608263198163733e-07, "epoch": 0.8607333448097779, "percentage": 86.03, "elapsed_time": "3:23:36", "remaining_time": "0:33:04"}
+ {"current_steps": 1260, "total_steps": 1453, "loss": 0.5382, "accuracy": 0.71875, "lr": 1.4843152257077276e-07, "epoch": 0.8676192115682562, "percentage": 86.72, "elapsed_time": "3:25:14", "remaining_time": "0:31:26"}
+ {"current_steps": 1270, "total_steps": 1453, "loss": 0.5103, "accuracy": 0.7515625357627869, "lr": 1.4078041315990818e-07, "epoch": 0.8745050783267344, "percentage": 87.41, "elapsed_time": "3:26:50", "remaining_time": "0:29:48"}
+ {"current_steps": 1280, "total_steps": 1453, "loss": 0.5217, "accuracy": 0.7484375238418579, "lr": 1.331293037490436e-07, "epoch": 0.8813909450852127, "percentage": 88.09, "elapsed_time": "3:28:26", "remaining_time": "0:28:10"}
+ {"current_steps": 1290, "total_steps": 1453, "loss": 0.5456, "accuracy": 0.746874988079071, "lr": 1.25478194338179e-07, "epoch": 0.8882768118436908, "percentage": 88.78, "elapsed_time": "3:30:03", "remaining_time": "0:26:32"}
+ {"current_steps": 1300, "total_steps": 1453, "loss": 0.525, "accuracy": 0.745312511920929, "lr": 1.1782708492731445e-07, "epoch": 0.895162678602169, "percentage": 89.47, "elapsed_time": "3:31:42", "remaining_time": "0:24:54"}
+ {"current_steps": 1310, "total_steps": 1453, "loss": 0.521, "accuracy": 0.7250000238418579, "lr": 1.1017597551644987e-07, "epoch": 0.9020485453606473, "percentage": 90.16, "elapsed_time": "3:33:18", "remaining_time": "0:23:17"}
+ {"current_steps": 1320, "total_steps": 1453, "loss": 0.5178, "accuracy": 0.745312511920929, "lr": 1.0252486610558531e-07, "epoch": 0.9089344121191255, "percentage": 90.85, "elapsed_time": "3:34:54", "remaining_time": "0:21:39"}
+ {"current_steps": 1330, "total_steps": 1453, "loss": 0.5512, "accuracy": 0.721875011920929, "lr": 9.487375669472072e-08, "epoch": 0.9158202788776038, "percentage": 91.53, "elapsed_time": "3:36:32", "remaining_time": "0:20:01"}
+ {"current_steps": 1340, "total_steps": 1453, "loss": 0.525, "accuracy": 0.7437500357627869, "lr": 8.722264728385616e-08, "epoch": 0.9227061456360819, "percentage": 92.22, "elapsed_time": "3:38:08", "remaining_time": "0:18:23"}
+ {"current_steps": 1350, "total_steps": 1453, "loss": 0.529, "accuracy": 0.7265625, "lr": 7.957153787299158e-08, "epoch": 0.9295920123945601, "percentage": 92.91, "elapsed_time": "3:39:44", "remaining_time": "0:16:45"}
+ {"current_steps": 1360, "total_steps": 1453, "loss": 0.4741, "accuracy": 0.770312488079071, "lr": 7.1920428462127e-08, "epoch": 0.9364778791530384, "percentage": 93.6, "elapsed_time": "3:41:20", "remaining_time": "0:15:08"}
+ {"current_steps": 1370, "total_steps": 1453, "loss": 0.5188, "accuracy": 0.7515625357627869, "lr": 6.426931905126243e-08, "epoch": 0.9433637459115166, "percentage": 94.29, "elapsed_time": "3:42:55", "remaining_time": "0:13:30"}
+ {"current_steps": 1380, "total_steps": 1453, "loss": 0.5022, "accuracy": 0.7671874761581421, "lr": 5.661820964039785e-08, "epoch": 0.9502496126699949, "percentage": 94.98, "elapsed_time": "3:44:31", "remaining_time": "0:11:52"}
+ {"current_steps": 1390, "total_steps": 1453, "loss": 0.5119, "accuracy": 0.753125011920929, "lr": 4.896710022953328e-08, "epoch": 0.9571354794284731, "percentage": 95.66, "elapsed_time": "3:46:08", "remaining_time": "0:10:14"}
+ {"current_steps": 1400, "total_steps": 1453, "loss": 0.5312, "accuracy": 0.745312511920929, "lr": 4.1315990818668707e-08, "epoch": 0.9640213461869512, "percentage": 96.35, "elapsed_time": "3:47:44", "remaining_time": "0:08:37"}
+ {"current_steps": 1410, "total_steps": 1453, "loss": 0.4874, "accuracy": 0.7718750238418579, "lr": 3.366488140780413e-08, "epoch": 0.9709072129454295, "percentage": 97.04, "elapsed_time": "3:49:20", "remaining_time": "0:06:59"}
+ {"current_steps": 1420, "total_steps": 1453, "loss": 0.5022, "accuracy": 0.7671874761581421, "lr": 2.6013771996939555e-08, "epoch": 0.9777930797039077, "percentage": 97.73, "elapsed_time": "3:50:59", "remaining_time": "0:05:22"}
+ {"current_steps": 1430, "total_steps": 1453, "loss": 0.4884, "accuracy": 0.778124988079071, "lr": 1.8362662586074982e-08, "epoch": 0.984678946462386, "percentage": 98.42, "elapsed_time": "3:52:37", "remaining_time": "0:03:44"}
+ {"current_steps": 1440, "total_steps": 1453, "loss": 0.5198, "accuracy": 0.734375, "lr": 1.0711553175210405e-08, "epoch": 0.9915648132208642, "percentage": 99.11, "elapsed_time": "3:54:14", "remaining_time": "0:02:06"}
+ {"current_steps": 1450, "total_steps": 1453, "loss": 0.4988, "accuracy": 0.7734375, "lr": 3.06044376434583e-09, "epoch": 0.9984506799793424, "percentage": 99.79, "elapsed_time": "3:55:51", "remaining_time": "0:00:29"}
+ {"current_steps": 1453, "total_steps": 1453, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "3:56:33", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2218 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 1453,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.006885866758478224,
+ "grad_norm": 21.357118272730254,
+ "learning_rate": 6.164383561643836e-08,
+ "logits/chosen": 0.028968211263418198,
+ "logits/rejected": 0.01612178608775139,
+ "logps/chosen": -197.14283752441406,
+ "logps/rejected": -195.18118286132812,
+ "loss": 0.694,
+ "rewards/accuracies": 0.3843749761581421,
+ "rewards/chosen": -0.0008211384410969913,
+ "rewards/margins": -0.0012691060546785593,
+ "rewards/rejected": 0.00044796784641221166,
+ "step": 10
+ },
+ {
+ "epoch": 0.013771733516956448,
+ "grad_norm": 22.78927104925093,
+ "learning_rate": 1.3013698630136985e-07,
+ "logits/chosen": 0.05674262344837189,
+ "logits/rejected": 0.0543050691485405,
+ "logps/chosen": -196.9640655517578,
+ "logps/rejected": -198.33688354492188,
+ "loss": 0.6943,
+ "rewards/accuracies": 0.5015624761581421,
+ "rewards/chosen": 0.0005912931519560516,
+ "rewards/margins": -0.0018875878304243088,
+ "rewards/rejected": 0.0024788810405880213,
+ "step": 20
+ },
+ {
+ "epoch": 0.02065760027543467,
+ "grad_norm": 20.1623734420225,
+ "learning_rate": 1.9863013698630135e-07,
+ "logits/chosen": 0.030807409435510635,
+ "logits/rejected": 0.015413804911077023,
+ "logps/chosen": -200.63473510742188,
+ "logps/rejected": -193.79344177246094,
+ "loss": 0.6931,
+ "rewards/accuracies": 0.4937500059604645,
+ "rewards/chosen": -0.00030722294468432665,
+ "rewards/margins": 0.0004742158344015479,
+ "rewards/rejected": -0.0007814386044628918,
+ "step": 30
+ },
+ {
+ "epoch": 0.027543467033912895,
+ "grad_norm": 19.302715968951826,
+ "learning_rate": 2.671232876712329e-07,
+ "logits/chosen": 0.019373048096895218,
+ "logits/rejected": 0.015362609177827835,
+ "logps/chosen": -193.3689422607422,
+ "logps/rejected": -193.8937530517578,
+ "loss": 0.6934,
+ "rewards/accuracies": 0.53125,
+ "rewards/chosen": 0.00165457627736032,
+ "rewards/margins": -0.00010138965444639325,
+ "rewards/rejected": 0.001755966106429696,
+ "step": 40
+ },
+ {
+ "epoch": 0.03442933379239112,
+ "grad_norm": 19.28639543966955,
+ "learning_rate": 3.3561643835616436e-07,
+ "logits/chosen": 0.02099643275141716,
+ "logits/rejected": 0.017172984778881073,
+ "logps/chosen": -195.41319274902344,
+ "logps/rejected": -192.13592529296875,
+ "loss": 0.6927,
+ "rewards/accuracies": 0.5265625715255737,
+ "rewards/chosen": 0.003476072335615754,
+ "rewards/margins": 0.0012514767004176974,
+ "rewards/rejected": 0.0022245957516133785,
+ "step": 50
+ },
+ {
+ "epoch": 0.04131520055086934,
+ "grad_norm": 18.35177129376054,
+ "learning_rate": 4.041095890410959e-07,
+ "logits/chosen": 0.028478674590587616,
+ "logits/rejected": 0.045705921947956085,
+ "logps/chosen": -193.37350463867188,
+ "logps/rejected": -193.1693115234375,
+ "loss": 0.6917,
+ "rewards/accuracies": 0.5062500238418579,
+ "rewards/chosen": 0.007831841707229614,
+ "rewards/margins": 0.0032318192534148693,
+ "rewards/rejected": 0.004600022919476032,
+ "step": 60
+ },
+ {
+ "epoch": 0.04820106730934756,
+ "grad_norm": 20.04933062392871,
+ "learning_rate": 4.726027397260274e-07,
+ "logits/chosen": 0.030644051730632782,
+ "logits/rejected": 0.010686805471777916,
+ "logps/chosen": -195.0802459716797,
+ "logps/rejected": -192.6405792236328,
+ "loss": 0.6929,
+ "rewards/accuracies": 0.503125011920929,
+ "rewards/chosen": 0.011022167280316353,
+ "rewards/margins": 0.0009471712401136756,
+ "rewards/rejected": 0.010074996389448643,
+ "step": 70
+ },
+ {
+ "epoch": 0.05508693406782579,
+ "grad_norm": 20.668505160828992,
+ "learning_rate": 5.410958904109589e-07,
+ "logits/chosen": 0.026326147839426994,
+ "logits/rejected": 0.04052453488111496,
+ "logps/chosen": -197.54429626464844,
+ "logps/rejected": -190.3190155029297,
+ "loss": 0.6908,
+ "rewards/accuracies": 0.5437500476837158,
+ "rewards/chosen": 0.01964644528925419,
+ "rewards/margins": 0.005270515568554401,
+ "rewards/rejected": 0.014375930652022362,
+ "step": 80
+ },
+ {
+ "epoch": 0.06197280082630401,
+ "grad_norm": 21.121196165929355,
+ "learning_rate": 6.095890410958904e-07,
+ "logits/chosen": -0.019326386973261833,
+ "logits/rejected": -0.022055240347981453,
+ "logps/chosen": -186.5928955078125,
+ "logps/rejected": -186.8990478515625,
+ "loss": 0.6894,
+ "rewards/accuracies": 0.5625,
+ "rewards/chosen": 0.024418987333774567,
+ "rewards/margins": 0.00805748626589775,
+ "rewards/rejected": 0.016361497342586517,
+ "step": 90
+ },
+ {
+ "epoch": 0.06885866758478223,
+ "grad_norm": 21.91548535538128,
+ "learning_rate": 6.78082191780822e-07,
+ "logits/chosen": 0.011051855981349945,
+ "logits/rejected": 0.012394784018397331,
+ "logps/chosen": -195.57264709472656,
+ "logps/rejected": -193.32449340820312,
+ "loss": 0.6901,
+ "rewards/accuracies": 0.5531250238418579,
+ "rewards/chosen": 0.03320036083459854,
+ "rewards/margins": 0.006988395471125841,
+ "rewards/rejected": 0.026211963966488838,
+ "step": 100
+ },
+ {
+ "epoch": 0.07574453434326046,
+ "grad_norm": 21.717264270490062,
+ "learning_rate": 7.465753424657533e-07,
+ "logits/chosen": 0.012640159577131271,
+ "logits/rejected": 0.010473139584064484,
+ "logps/chosen": -186.4929962158203,
+ "logps/rejected": -187.80308532714844,
+ "loss": 0.6856,
+ "rewards/accuracies": 0.6171875,
+ "rewards/chosen": 0.04811429604887962,
+ "rewards/margins": 0.016630370169878006,
+ "rewards/rejected": 0.03148392587900162,
+ "step": 110
+ },
+ {
+ "epoch": 0.08263040110173868,
+ "grad_norm": 19.33792551477739,
+ "learning_rate": 8.150684931506849e-07,
+ "logits/chosen": 0.025827227160334587,
+ "logits/rejected": 0.02327391505241394,
+ "logps/chosen": -190.1212921142578,
+ "logps/rejected": -187.36219787597656,
+ "loss": 0.6837,
+ "rewards/accuracies": 0.5953124761581421,
+ "rewards/chosen": 0.06050765886902809,
+ "rewards/margins": 0.020764853805303574,
+ "rewards/rejected": 0.039742808789014816,
+ "step": 120
+ },
+ {
+ "epoch": 0.08951626786021691,
+ "grad_norm": 20.381956172548726,
+ "learning_rate": 8.835616438356164e-07,
+ "logits/chosen": -0.005789112765341997,
+ "logits/rejected": -0.013411665335297585,
+ "logps/chosen": -192.898681640625,
+ "logps/rejected": -190.70518493652344,
+ "loss": 0.682,
+ "rewards/accuracies": 0.6031250357627869,
+ "rewards/chosen": 0.08533641695976257,
+ "rewards/margins": 0.025243345648050308,
+ "rewards/rejected": 0.060093071311712265,
+ "step": 130
+ },
+ {
+ "epoch": 0.09640213461869512,
+ "grad_norm": 20.606801959315014,
+ "learning_rate": 9.520547945205479e-07,
+ "logits/chosen": -0.016342442482709885,
+ "logits/rejected": -0.025571543723344803,
+ "logps/chosen": -192.14207458496094,
+ "logps/rejected": -187.4148712158203,
+ "loss": 0.6789,
+ "rewards/accuracies": 0.6000000238418579,
+ "rewards/chosen": 0.10957922041416168,
+ "rewards/margins": 0.033595748245716095,
+ "rewards/rejected": 0.07598347216844559,
+ "step": 140
+ },
+ {
+ "epoch": 0.10328800137717335,
+ "grad_norm": 23.262305629362952,
+ "learning_rate": 9.977046671767407e-07,
+ "logits/chosen": -0.0596565343439579,
+ "logits/rejected": -0.07673357427120209,
+ "logps/chosen": -188.7398223876953,
+ "logps/rejected": -188.24871826171875,
+ "loss": 0.6775,
+ "rewards/accuracies": 0.5906250476837158,
+ "rewards/chosen": 0.12745949625968933,
+ "rewards/margins": 0.03920283168554306,
+ "rewards/rejected": 0.08825664967298508,
+ "step": 150
+ },
+ {
+ "epoch": 0.11017386813565158,
+ "grad_norm": 19.7846291123117,
+ "learning_rate": 9.90053557765876e-07,
+ "logits/chosen": -0.047244228422641754,
+ "logits/rejected": -0.04694109782576561,
+ "logps/chosen": -181.18203735351562,
+ "logps/rejected": -178.70843505859375,
+ "loss": 0.6647,
+ "rewards/accuracies": 0.6546875238418579,
+ "rewards/chosen": 0.15349924564361572,
+ "rewards/margins": 0.06813298165798187,
+ "rewards/rejected": 0.08536626398563385,
+ "step": 160
+ },
+ {
+ "epoch": 0.1170597348941298,
+ "grad_norm": 21.472880138840512,
+ "learning_rate": 9.824024483550113e-07,
+ "logits/chosen": -0.14841578900814056,
+ "logits/rejected": -0.12123996764421463,
+ "logps/chosen": -190.9058837890625,
+ "logps/rejected": -187.15444946289062,
+ "loss": 0.6698,
+ "rewards/accuracies": 0.604687511920929,
+ "rewards/chosen": 0.16753645241260529,
+ "rewards/margins": 0.06376808881759644,
+ "rewards/rejected": 0.10376835614442825,
+ "step": 170
+ },
+ {
+ "epoch": 0.12394560165260803,
+ "grad_norm": 23.964636488520863,
+ "learning_rate": 9.747513389441468e-07,
+ "logits/chosen": -0.13839945197105408,
+ "logits/rejected": -0.1292952597141266,
+ "logps/chosen": -188.83291625976562,
+ "logps/rejected": -189.17947387695312,
+ "loss": 0.6713,
+ "rewards/accuracies": 0.612500011920929,
+ "rewards/chosen": 0.15886150300502777,
+ "rewards/margins": 0.06130904704332352,
+ "rewards/rejected": 0.09755245596170425,
+ "step": 180
+ },
+ {
+ "epoch": 0.13083146841108625,
+ "grad_norm": 21.014923075997917,
+ "learning_rate": 9.671002295332823e-07,
+ "logits/chosen": -0.17998671531677246,
+ "logits/rejected": -0.16726107895374298,
+ "logps/chosen": -186.77247619628906,
+ "logps/rejected": -183.4087371826172,
+ "loss": 0.6543,
+ "rewards/accuracies": 0.6296875476837158,
+ "rewards/chosen": 0.2054910957813263,
+ "rewards/margins": 0.11184875667095184,
+ "rewards/rejected": 0.09364232420921326,
+ "step": 190
+ },
+ {
+ "epoch": 0.13771733516956447,
+ "grad_norm": 20.66388348945257,
+ "learning_rate": 9.594491201224178e-07,
+ "logits/chosen": -0.14593414962291718,
+ "logits/rejected": -0.13679011166095734,
+ "logps/chosen": -190.45223999023438,
+ "logps/rejected": -191.02951049804688,
+ "loss": 0.6718,
+ "rewards/accuracies": 0.609375,
+ "rewards/chosen": 0.16157108545303345,
+ "rewards/margins": 0.07207615673542023,
+ "rewards/rejected": 0.08949493616819382,
+ "step": 200
+ },
+ {
+ "epoch": 0.14460320192804269,
+ "grad_norm": 29.99539925690682,
+ "learning_rate": 9.517980107115531e-07,
+ "logits/chosen": -0.15793928503990173,
+ "logits/rejected": -0.15555617213249207,
+ "logps/chosen": -196.1856231689453,
+ "logps/rejected": -190.77688598632812,
+ "loss": 0.657,
+ "rewards/accuracies": 0.6312500238418579,
+ "rewards/chosen": 0.15518102049827576,
+ "rewards/margins": 0.10747373104095459,
+ "rewards/rejected": 0.04770728200674057,
+ "step": 210
+ },
+ {
+ "epoch": 0.15148906868652093,
+ "grad_norm": 22.484571855499286,
+ "learning_rate": 9.441469013006885e-07,
+ "logits/chosen": -0.09849053621292114,
+ "logits/rejected": -0.10602966696023941,
+ "logps/chosen": -188.15655517578125,
+ "logps/rejected": -189.34217834472656,
+ "loss": 0.6495,
+ "rewards/accuracies": 0.6375000476837158,
+ "rewards/chosen": 0.14013966917991638,
+ "rewards/margins": 0.12994103133678436,
+ "rewards/rejected": 0.010198642499744892,
+ "step": 220
+ },
+ {
+ "epoch": 0.15837493544499914,
+ "grad_norm": 23.14684419321817,
+ "learning_rate": 9.364957918898239e-07,
+ "logits/chosen": -0.12771211564540863,
+ "logits/rejected": -0.12738053500652313,
+ "logps/chosen": -188.27279663085938,
+ "logps/rejected": -189.2296142578125,
+ "loss": 0.6483,
+ "rewards/accuracies": 0.6421874761581421,
+ "rewards/chosen": 0.12199673056602478,
+ "rewards/margins": 0.13607241213321686,
+ "rewards/rejected": -0.014075696468353271,
+ "step": 230
+ },
+ {
+ "epoch": 0.16526080220347736,
+ "grad_norm": 21.14311199895393,
+ "learning_rate": 9.288446824789594e-07,
+ "logits/chosen": -0.1267087310552597,
+ "logits/rejected": -0.11214447021484375,
+ "logps/chosen": -183.26416015625,
+ "logps/rejected": -186.4851837158203,
+ "loss": 0.641,
+ "rewards/accuracies": 0.643750011920929,
+ "rewards/chosen": 0.10954004526138306,
+ "rewards/margins": 0.14909771084785461,
+ "rewards/rejected": -0.03955767676234245,
+ "step": 240
+ },
+ {
+ "epoch": 0.17214666896195557,
+ "grad_norm": 25.009711908956962,
+ "learning_rate": 9.211935730680948e-07,
+ "logits/chosen": -0.14686468243598938,
+ "logits/rejected": -0.12413311004638672,
+ "logps/chosen": -193.5541229248047,
+ "logps/rejected": -197.37326049804688,
+ "loss": 0.6497,
+ "rewards/accuracies": 0.6328125596046448,
+ "rewards/chosen": 0.06355239450931549,
+ "rewards/margins": 0.1444573700428009,
+ "rewards/rejected": -0.08090498298406601,
+ "step": 250
+ },
+ {
+ "epoch": 0.17903253572043382,
+ "grad_norm": 21.375848926501856,
+ "learning_rate": 9.135424636572303e-07,
+ "logits/chosen": -0.12750156223773956,
+ "logits/rejected": -0.11904246360063553,
+ "logps/chosen": -194.12649536132812,
+ "logps/rejected": -188.32383728027344,
+ "loss": 0.6466,
+ "rewards/accuracies": 0.6296875476837158,
+ "rewards/chosen": 0.060907114297151566,
+ "rewards/margins": 0.14842237532138824,
+ "rewards/rejected": -0.08751524984836578,
+ "step": 260
+ },
+ {
+ "epoch": 0.18591840247891203,
+ "grad_norm": 23.09705200083216,
+ "learning_rate": 9.058913542463656e-07,
+ "logits/chosen": -0.20926274359226227,
+ "logits/rejected": -0.19603878259658813,
+ "logps/chosen": -193.5789337158203,
+ "logps/rejected": -190.8974609375,
+ "loss": 0.6325,
+ "rewards/accuracies": 0.6312500238418579,
+ "rewards/chosen": 0.08186967670917511,
+ "rewards/margins": 0.190498948097229,
+ "rewards/rejected": -0.1086292639374733,
+ "step": 270
+ },
+ {
+ "epoch": 0.19280426923739025,
+ "grad_norm": 21.037835718471506,
+ "learning_rate": 8.982402448355011e-07,
+ "logits/chosen": -0.17319074273109436,
+ "logits/rejected": -0.17468921840190887,
+ "logps/chosen": -192.30039978027344,
+ "logps/rejected": -192.83987426757812,
+ "loss": 0.6299,
+ "rewards/accuracies": 0.6625000238418579,
+ "rewards/chosen": 0.08289219439029694,
+ "rewards/margins": 0.21142172813415527,
+ "rewards/rejected": -0.12852953374385834,
+ "step": 280
+ },
+ {
+ "epoch": 0.1996901359958685,
+ "grad_norm": 21.56149675955503,
+ "learning_rate": 8.905891354246365e-07,
+ "logits/chosen": -0.12733662128448486,
+ "logits/rejected": -0.15374454855918884,
+ "logps/chosen": -191.71080017089844,
+ "logps/rejected": -192.95883178710938,
+ "loss": 0.6268,
+ "rewards/accuracies": 0.6671875715255737,
+ "rewards/chosen": 0.02507857419550419,
+ "rewards/margins": 0.211945578455925,
+ "rewards/rejected": -0.18686699867248535,
+ "step": 290
+ },
+ {
+ "epoch": 0.2065760027543467,
+ "grad_norm": 21.105833082853255,
+ "learning_rate": 8.829380260137719e-07,
+ "logits/chosen": -0.14651824533939362,
+ "logits/rejected": -0.12377053499221802,
+ "logps/chosen": -195.86325073242188,
+ "logps/rejected": -201.83001708984375,
+ "loss": 0.6151,
+ "rewards/accuracies": 0.7046874761581421,
+ "rewards/chosen": 0.008288959972560406,
+ "rewards/margins": 0.25510185956954956,
+ "rewards/rejected": -0.2468128800392151,
+ "step": 300
+ },
+ {
+ "epoch": 0.21346186951282492,
+ "grad_norm": 21.239404607014603,
+ "learning_rate": 8.752869166029074e-07,
+ "logits/chosen": -0.13711729645729065,
+ "logits/rejected": -0.11297348141670227,
+ "logps/chosen": -197.0601043701172,
+ "logps/rejected": -203.13510131835938,
+ "loss": 0.6302,
+ "rewards/accuracies": 0.6578125357627869,
+ "rewards/chosen": -0.033613648265600204,
+ "rewards/margins": 0.23345637321472168,
+ "rewards/rejected": -0.2670700252056122,
+ "step": 310
+ },
+ {
+ "epoch": 0.22034773627130316,
+ "grad_norm": 28.358576456872942,
+ "learning_rate": 8.676358071920427e-07,
+ "logits/chosen": -0.15952648222446442,
+ "logits/rejected": -0.13048917055130005,
+ "logps/chosen": -191.40574645996094,
+ "logps/rejected": -194.6696319580078,
+ "loss": 0.6266,
+ "rewards/accuracies": 0.6656249761581421,
+ "rewards/chosen": -0.06403035670518875,
+ "rewards/margins": 0.23444205522537231,
+ "rewards/rejected": -0.29847240447998047,
+ "step": 320
+ },
+ {
+ "epoch": 0.22723360302978138,
+ "grad_norm": 21.824170653831928,
+ "learning_rate": 8.599846977811782e-07,
+ "logits/chosen": -0.1640833169221878,
+ "logits/rejected": -0.1528327465057373,
+ "logps/chosen": -187.84799194335938,
+ "logps/rejected": -201.14830017089844,
+ "loss": 0.612,
+ "rewards/accuracies": 0.6796875,
+ "rewards/chosen": -0.07599131762981415,
+ "rewards/margins": 0.2906650900840759,
+ "rewards/rejected": -0.36665642261505127,
+ "step": 330
+ },
+ {
+ "epoch": 0.2341194697882596,
+ "grad_norm": 21.074085075809414,
+ "learning_rate": 8.523335883703136e-07,
+ "logits/chosen": -0.1743679940700531,
+ "logits/rejected": -0.16399502754211426,
+ "logps/chosen": -202.89622497558594,
+ "logps/rejected": -212.13699340820312,
+ "loss": 0.6088,
+ "rewards/accuracies": 0.6796875596046448,
+ "rewards/chosen": -0.055201247334480286,
+ "rewards/margins": 0.30173224210739136,
+ "rewards/rejected": -0.35693347454071045,
+ "step": 340
+ },
+ {
+ "epoch": 0.2410053365467378,
+ "grad_norm": 23.097522066642618,
+ "learning_rate": 8.446824789594492e-07,
+ "logits/chosen": -0.17943890392780304,
+ "logits/rejected": -0.16794875264167786,
+ "logps/chosen": -194.87747192382812,
+ "logps/rejected": -194.46771240234375,
+ "loss": 0.6174,
+ "rewards/accuracies": 0.667187511920929,
+ "rewards/chosen": -0.12930315732955933,
+ "rewards/margins": 0.2889351546764374,
+ "rewards/rejected": -0.4182383418083191,
+ "step": 350
+ },
+ {
+ "epoch": 0.24789120330521605,
+ "grad_norm": 22.706444342156583,
+ "learning_rate": 8.370313695485846e-07,
+ "logits/chosen": -0.19892311096191406,
+ "logits/rejected": -0.18966780602931976,
+ "logps/chosen": -190.1072998046875,
+ "logps/rejected": -193.8407745361328,
+ "loss": 0.6075,
+ "rewards/accuracies": 0.6765625476837158,
+ "rewards/chosen": -0.08109837025403976,
+ "rewards/margins": 0.31933993101119995,
+ "rewards/rejected": -0.4004383087158203,
+ "step": 360
+ },
+ {
+ "epoch": 0.25477707006369427,
+ "grad_norm": 22.66877601351879,
+ "learning_rate": 8.293802601377199e-07,
+ "logits/chosen": -0.20010100305080414,
+ "logits/rejected": -0.17780742049217224,
+ "logps/chosen": -194.33409118652344,
+ "logps/rejected": -196.40740966796875,
+ "loss": 0.6099,
+ "rewards/accuracies": 0.6812500357627869,
+ "rewards/chosen": -0.10439629852771759,
+ "rewards/margins": 0.31406161189079285,
+ "rewards/rejected": -0.41845792531967163,
+ "step": 370
+ },
+ {
+ "epoch": 0.2616629368221725,
+ "grad_norm": 23.861244423965402,
+ "learning_rate": 8.217291507268554e-07,
+ "logits/chosen": -0.2074911892414093,
+ "logits/rejected": -0.2102966606616974,
+ "logps/chosen": -204.8916015625,
+ "logps/rejected": -206.48434448242188,
+ "loss": 0.612,
+ "rewards/accuracies": 0.659375011920929,
+ "rewards/chosen": -0.12396355718374252,
+ "rewards/margins": 0.32503077387809753,
+ "rewards/rejected": -0.44899433851242065,
+ "step": 380
+ },
+ {
+ "epoch": 0.2685488035806507,
+ "grad_norm": 18.426704146792513,
+ "learning_rate": 8.140780413159908e-07,
+ "logits/chosen": -0.1731143593788147,
+ "logits/rejected": -0.1533968597650528,
+ "logps/chosen": -189.8383331298828,
+ "logps/rejected": -195.228515625,
+ "loss": 0.6082,
+ "rewards/accuracies": 0.6796875,
+ "rewards/chosen": -0.16357733309268951,
+ "rewards/margins": 0.3212567865848541,
+ "rewards/rejected": -0.48483413457870483,
+ "step": 390
+ },
+ {
+ "epoch": 0.27543467033912894,
+ "grad_norm": 22.10185350841625,
+ "learning_rate": 8.064269319051263e-07,
+ "logits/chosen": -0.19260063767433167,
+ "logits/rejected": -0.18926046788692474,
+ "logps/chosen": -203.06272888183594,
+ "logps/rejected": -201.2696533203125,
+ "loss": 0.6055,
+ "rewards/accuracies": 0.668749988079071,
+ "rewards/chosen": -0.19341671466827393,
+ "rewards/margins": 0.3326266407966614,
+ "rewards/rejected": -0.5260434150695801,
+ "step": 400
+ },
+ {
+ "epoch": 0.2823205370976072,
+ "grad_norm": 19.998016249738562,
+ "learning_rate": 7.987758224942617e-07,
+ "logits/chosen": -0.21146024763584137,
+ "logits/rejected": -0.17835554480552673,
+ "logps/chosen": -195.14520263671875,
+ "logps/rejected": -204.84442138671875,
+ "loss": 0.5995,
+ "rewards/accuracies": 0.6812499761581421,
+ "rewards/chosen": -0.16101008653640747,
+ "rewards/margins": 0.37827068567276,
+ "rewards/rejected": -0.5392807722091675,
+ "step": 410
+ },
+ {
+ "epoch": 0.28920640385608537,
+ "grad_norm": 23.467770681704234,
+ "learning_rate": 7.91124713083397e-07,
+ "logits/chosen": -0.21747922897338867,
+ "logits/rejected": -0.18482878804206848,
+ "logps/chosen": -196.29568481445312,
+ "logps/rejected": -203.74679565429688,
+ "loss": 0.6077,
+ "rewards/accuracies": 0.6578124761581421,
+ "rewards/chosen": -0.231543630361557,
+ "rewards/margins": 0.3246532380580902,
+ "rewards/rejected": -0.5561968684196472,
+ "step": 420
+ },
+ {
+ "epoch": 0.2960922706145636,
+ "grad_norm": 23.246622169062537,
+ "learning_rate": 7.834736036725325e-07,
+ "logits/chosen": -0.22648438811302185,
+ "logits/rejected": -0.19521580636501312,
+ "logps/chosen": -199.969970703125,
+ "logps/rejected": -207.82162475585938,
+ "loss": 0.5838,
+ "rewards/accuracies": 0.7093750238418579,
+ "rewards/chosen": -0.21634486317634583,
+ "rewards/margins": 0.43085139989852905,
+ "rewards/rejected": -0.6471962928771973,
+ "step": 430
+ },
+ {
+ "epoch": 0.30297813737304186,
+ "grad_norm": 21.048208882279503,
+ "learning_rate": 7.758224942616679e-07,
+ "logits/chosen": -0.25602301955223083,
+ "logits/rejected": -0.216285839676857,
+ "logps/chosen": -209.1434326171875,
+ "logps/rejected": -216.78334045410156,
+ "loss": 0.5943,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.26122772693634033,
+ "rewards/margins": 0.4267864227294922,
+ "rewards/rejected": -0.6880142092704773,
+ "step": 440
+ },
+ {
+ "epoch": 0.30986400413152004,
+ "grad_norm": 22.59925573646321,
+ "learning_rate": 7.681713848508034e-07,
+ "logits/chosen": -0.18633471429347992,
+ "logits/rejected": -0.1828474998474121,
+ "logps/chosen": -193.50070190429688,
+ "logps/rejected": -200.55421447753906,
+ "loss": 0.6007,
+ "rewards/accuracies": 0.6968750357627869,
+ "rewards/chosen": -0.2820444107055664,
+ "rewards/margins": 0.3879047632217407,
+ "rewards/rejected": -0.6699492335319519,
+ "step": 450
+ },
+ {
+ "epoch": 0.3167498708899983,
+ "grad_norm": 21.64590415072203,
+ "learning_rate": 7.605202754399388e-07,
+ "logits/chosen": -0.2189503312110901,
+ "logits/rejected": -0.2030341923236847,
+ "logps/chosen": -201.5972900390625,
+ "logps/rejected": -206.25152587890625,
+ "loss": 0.5764,
+ "rewards/accuracies": 0.6859375238418579,
+ "rewards/chosen": -0.22065776586532593,
+ "rewards/margins": 0.47276243567466736,
+ "rewards/rejected": -0.6934202313423157,
+ "step": 460
+ },
+ {
+ "epoch": 0.32363573764847653,
+ "grad_norm": 23.7581974437075,
+ "learning_rate": 7.528691660290742e-07,
+ "logits/chosen": -0.22047793865203857,
+ "logits/rejected": -0.19113516807556152,
+ "logps/chosen": -199.07958984375,
+ "logps/rejected": -199.7004852294922,
+ "loss": 0.5815,
+ "rewards/accuracies": 0.7015624642372131,
+ "rewards/chosen": -0.24612434208393097,
+ "rewards/margins": 0.4380369186401367,
+ "rewards/rejected": -0.6841613054275513,
+ "step": 470
+ },
+ {
+ "epoch": 0.3305216044069547,
+ "grad_norm": 19.206769763517677,
+ "learning_rate": 7.452180566182096e-07,
+ "logits/chosen": -0.2202739715576172,
+ "logits/rejected": -0.19977977871894836,
+ "logps/chosen": -193.5897216796875,
+ "logps/rejected": -202.0029754638672,
+ "loss": 0.601,
+ "rewards/accuracies": 0.671875,
+ "rewards/chosen": -0.31127315759658813,
+ "rewards/margins": 0.4242890477180481,
+ "rewards/rejected": -0.7355621457099915,
+ "step": 480
+ },
+ {
+ "epoch": 0.33740747116543296,
+ "grad_norm": 24.0864716650183,
+ "learning_rate": 7.37566947207345e-07,
+ "logits/chosen": -0.2397327870130539,
+ "logits/rejected": -0.21453964710235596,
+ "logps/chosen": -203.811279296875,
+ "logps/rejected": -207.71994018554688,
+ "loss": 0.5945,
+ "rewards/accuracies": 0.6687500476837158,
+ "rewards/chosen": -0.1965150386095047,
+ "rewards/margins": 0.4662647843360901,
+ "rewards/rejected": -0.6627798080444336,
+ "step": 490
+ },
+ {
+ "epoch": 0.34429333792391115,
+ "grad_norm": 23.441407049395554,
+ "learning_rate": 7.299158377964805e-07,
+ "logits/chosen": -0.2867848873138428,
+ "logits/rejected": -0.26185181736946106,
+ "logps/chosen": -197.30357360839844,
+ "logps/rejected": -202.0620574951172,
+ "loss": 0.5829,
+ "rewards/accuracies": 0.699999988079071,
+ "rewards/chosen": -0.22782030701637268,
+ "rewards/margins": 0.4941255450248718,
+ "rewards/rejected": -0.7219458222389221,
+ "step": 500
+ },
+ {
+ "epoch": 0.3511792046823894,
+ "grad_norm": 24.24165547819113,
+ "learning_rate": 7.222647283856159e-07,
+ "logits/chosen": -0.322261780500412,
+ "logits/rejected": -0.29276198148727417,
+ "logps/chosen": -208.84205627441406,
+ "logps/rejected": -212.8937530517578,
+ "loss": 0.5725,
+ "rewards/accuracies": 0.7312500476837158,
+ "rewards/chosen": -0.17469552159309387,
+ "rewards/margins": 0.5125805139541626,
+ "rewards/rejected": -0.6872760057449341,
+ "step": 510
+ },
+ {
+ "epoch": 0.35806507144086763,
+ "grad_norm": 20.872269830269385,
+ "learning_rate": 7.146136189747513e-07,
+ "logits/chosen": -0.3218885660171509,
+ "logits/rejected": -0.29816335439682007,
+ "logps/chosen": -193.01654052734375,
+ "logps/rejected": -196.80349731445312,
+ "loss": 0.577,
+ "rewards/accuracies": 0.703125,
+ "rewards/chosen": -0.24363525211811066,
+ "rewards/margins": 0.4531596302986145,
+ "rewards/rejected": -0.696794867515564,
+ "step": 520
+ },
+ {
+ "epoch": 0.3649509381993458,
+ "grad_norm": 28.4673161426841,
+ "learning_rate": 7.069625095638867e-07,
+ "logits/chosen": -0.34582602977752686,
+ "logits/rejected": -0.31747424602508545,
+ "logps/chosen": -196.18231201171875,
+ "logps/rejected": -201.42234802246094,
+ "loss": 0.5766,
+ "rewards/accuracies": 0.707812488079071,
+ "rewards/chosen": -0.24639394879341125,
+ "rewards/margins": 0.5019959211349487,
+ "rewards/rejected": -0.7483898997306824,
+ "step": 530
+ },
+ {
+ "epoch": 0.37183680495782406,
+ "grad_norm": 24.919532919405224,
+ "learning_rate": 6.993114001530222e-07,
+ "logits/chosen": -0.3100280165672302,
+ "logits/rejected": -0.29064637422561646,
+ "logps/chosen": -198.67388916015625,
+ "logps/rejected": -202.16505432128906,
+ "loss": 0.5615,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.1877853274345398,
+ "rewards/margins": 0.5081425905227661,
+ "rewards/rejected": -0.6959279179573059,
+ "step": 540
+ },
+ {
+ "epoch": 0.3787226717163023,
+ "grad_norm": 22.60350537919111,
+ "learning_rate": 6.916602907421576e-07,
+ "logits/chosen": -0.29818981885910034,
+ "logits/rejected": -0.26806640625,
+ "logps/chosen": -191.7179412841797,
+ "logps/rejected": -201.2666015625,
+ "loss": 0.5994,
+ "rewards/accuracies": 0.6890625357627869,
+ "rewards/chosen": -0.2534283399581909,
+ "rewards/margins": 0.48638302087783813,
+ "rewards/rejected": -0.739811360836029,
+ "step": 550
+ },
+ {
+ "epoch": 0.3856085384747805,
+ "grad_norm": 25.67908922423141,
+ "learning_rate": 6.84009181331293e-07,
+ "logits/chosen": -0.2803298830986023,
+ "logits/rejected": -0.2273053675889969,
+ "logps/chosen": -193.68869018554688,
+ "logps/rejected": -201.9783935546875,
+ "loss": 0.6023,
+ "rewards/accuracies": 0.6656250357627869,
+ "rewards/chosen": -0.23008039593696594,
+ "rewards/margins": 0.43934205174446106,
+ "rewards/rejected": -0.6694223880767822,
+ "step": 560
+ },
+ {
+ "epoch": 0.39249440523325874,
+ "grad_norm": 23.790727771917158,
+ "learning_rate": 6.763580719204285e-07,
+ "logits/chosen": -0.2388366162776947,
+ "logits/rejected": -0.20770037174224854,
+ "logps/chosen": -195.17596435546875,
+ "logps/rejected": -203.053466796875,
+ "loss": 0.5773,
+ "rewards/accuracies": 0.6812500357627869,
+ "rewards/chosen": -0.26547545194625854,
+ "rewards/margins": 0.4970327615737915,
+ "rewards/rejected": -0.7625082731246948,
+ "step": 570
+ },
+ {
+ "epoch": 0.399380271991737,
+ "grad_norm": 22.2692853997441,
+ "learning_rate": 6.687069625095638e-07,
+ "logits/chosen": -0.25696471333503723,
+ "logits/rejected": -0.2511045038700104,
+ "logps/chosen": -201.10848999023438,
+ "logps/rejected": -210.22183227539062,
+ "loss": 0.5876,
+ "rewards/accuracies": 0.7093750238418579,
+ "rewards/chosen": -0.2806377708911896,
+ "rewards/margins": 0.48363256454467773,
+ "rewards/rejected": -0.7642703056335449,
+ "step": 580
+ },
+ {
+ "epoch": 0.40626613875021517,
+ "grad_norm": 20.563254457184485,
+ "learning_rate": 6.610558530986993e-07,
+ "logits/chosen": -0.2363700568675995,
+ "logits/rejected": -0.2016097605228424,
+ "logps/chosen": -192.15310668945312,
+ "logps/rejected": -198.71865844726562,
+ "loss": 0.5925,
+ "rewards/accuracies": 0.6734374761581421,
+ "rewards/chosen": -0.2997559607028961,
+ "rewards/margins": 0.4971032738685608,
+ "rewards/rejected": -0.7968591451644897,
+ "step": 590
+ },
+ {
+ "epoch": 0.4131520055086934,
+ "grad_norm": 27.42297939022822,
+ "learning_rate": 6.534047436878347e-07,
+ "logits/chosen": -0.25612396001815796,
+ "logits/rejected": -0.24113687872886658,
+ "logps/chosen": -202.80996704101562,
+ "logps/rejected": -206.5990753173828,
+ "loss": 0.5923,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.2972622215747833,
+ "rewards/margins": 0.44471475481987,
+ "rewards/rejected": -0.7419769763946533,
+ "step": 600
+ },
+ {
+ "epoch": 0.42003787226717165,
+ "grad_norm": 21.920676991754373,
+ "learning_rate": 6.457536342769701e-07,
+ "logits/chosen": -0.298888236284256,
+ "logits/rejected": -0.2644284665584564,
+ "logps/chosen": -196.77108764648438,
+ "logps/rejected": -203.39205932617188,
+ "loss": 0.6058,
+ "rewards/accuracies": 0.6890625357627869,
+ "rewards/chosen": -0.3232823312282562,
+ "rewards/margins": 0.45335179567337036,
+ "rewards/rejected": -0.7766340970993042,
+ "step": 610
+ },
+ {
+ "epoch": 0.42692373902564984,
+ "grad_norm": 22.64266147842626,
+ "learning_rate": 6.381025248661056e-07,
+ "logits/chosen": -0.28491443395614624,
+ "logits/rejected": -0.25603392720222473,
+ "logps/chosen": -196.12831115722656,
+ "logps/rejected": -210.8316192626953,
+ "loss": 0.5515,
+ "rewards/accuracies": 0.7312500476837158,
+ "rewards/chosen": -0.22433951497077942,
+ "rewards/margins": 0.564956545829773,
+ "rewards/rejected": -0.7892960906028748,
+ "step": 620
+ },
+ {
+ "epoch": 0.4338096057841281,
+ "grad_norm": 22.374449198558654,
+ "learning_rate": 6.304514154552409e-07,
+ "logits/chosen": -0.2371789813041687,
+ "logits/rejected": -0.21084988117218018,
+ "logps/chosen": -196.3636932373047,
+ "logps/rejected": -205.9162139892578,
+ "loss": 0.5643,
+ "rewards/accuracies": 0.723437488079071,
+ "rewards/chosen": -0.22664019465446472,
+ "rewards/margins": 0.5722544193267822,
+ "rewards/rejected": -0.7988946437835693,
+ "step": 630
+ },
+ {
+ "epoch": 0.4406954725426063,
+ "grad_norm": 23.27173099331,
+ "learning_rate": 6.228003060443764e-07,
+ "logits/chosen": -0.22489549219608307,
+ "logits/rejected": -0.20038262009620667,
+ "logps/chosen": -186.9170684814453,
+ "logps/rejected": -196.61355590820312,
+ "loss": 0.5342,
+ "rewards/accuracies": 0.7531250715255737,
+ "rewards/chosen": -0.1936413049697876,
+ "rewards/margins": 0.6107944250106812,
+ "rewards/rejected": -0.8044357299804688,
+ "step": 640
+ },
+ {
+ "epoch": 0.4475813393010845,
+ "grad_norm": 23.12513848271565,
+ "learning_rate": 6.151491966335118e-07,
+ "logits/chosen": -0.2062905728816986,
+ "logits/rejected": -0.1937059462070465,
+ "logps/chosen": -207.9930877685547,
+ "logps/rejected": -216.9258575439453,
+ "loss": 0.5964,
+ "rewards/accuracies": 0.6953125,
+ "rewards/chosen": -0.3074573278427124,
+ "rewards/margins": 0.5258516669273376,
+ "rewards/rejected": -0.8333090543746948,
+ "step": 650
+ },
+ {
+ "epoch": 0.45446720605956276,
+ "grad_norm": 23.623328160601158,
+ "learning_rate": 6.074980872226473e-07,
+ "logits/chosen": -0.2363741099834442,
+ "logits/rejected": -0.19654276967048645,
+ "logps/chosen": -201.72991943359375,
+ "logps/rejected": -210.9528045654297,
+ "loss": 0.593,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.3014895021915436,
+ "rewards/margins": 0.4921172559261322,
+ "rewards/rejected": -0.7936067581176758,
+ "step": 660
+ },
+ {
+ "epoch": 0.46135307281804094,
+ "grad_norm": 22.738540567341648,
+ "learning_rate": 5.998469778117827e-07,
+ "logits/chosen": -0.25270819664001465,
+ "logits/rejected": -0.22932419180870056,
+ "logps/chosen": -199.99636840820312,
+ "logps/rejected": -211.69168090820312,
+ "loss": 0.5572,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.25672680139541626,
+ "rewards/margins": 0.5584204792976379,
+ "rewards/rejected": -0.8151472806930542,
+ "step": 670
+ },
+ {
+ "epoch": 0.4682389395765192,
+ "grad_norm": 25.684422832815645,
+ "learning_rate": 5.92195868400918e-07,
+ "logits/chosen": -0.19682064652442932,
+ "logits/rejected": -0.1747521609067917,
+ "logps/chosen": -207.11001586914062,
+ "logps/rejected": -213.78707885742188,
+ "loss": 0.574,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.34769681096076965,
+ "rewards/margins": 0.5807652473449707,
+ "rewards/rejected": -0.928462028503418,
+ "step": 680
+ },
+ {
+ "epoch": 0.47512480633499743,
+ "grad_norm": 20.71126021785729,
+ "learning_rate": 5.845447589900535e-07,
+ "logits/chosen": -0.24822764098644257,
+ "logits/rejected": -0.22624938189983368,
+ "logps/chosen": -204.87779235839844,
+ "logps/rejected": -210.4464569091797,
+ "loss": 0.5606,
+ "rewards/accuracies": 0.7375000715255737,
+ "rewards/chosen": -0.28525784611701965,
+ "rewards/margins": 0.5286524891853333,
+ "rewards/rejected": -0.8139103651046753,
+ "step": 690
+ },
+ {
+ "epoch": 0.4820106730934756,
+ "grad_norm": 26.349944755934768,
+ "learning_rate": 5.768936495791889e-07,
+ "logits/chosen": -0.21356113255023956,
+ "logits/rejected": -0.1965031921863556,
+ "logps/chosen": -199.73861694335938,
+ "logps/rejected": -204.8638153076172,
+ "loss": 0.5816,
+ "rewards/accuracies": 0.6968749761581421,
+ "rewards/chosen": -0.3604661524295807,
+ "rewards/margins": 0.5489579439163208,
+ "rewards/rejected": -0.9094240665435791,
+ "step": 700
+ },
+ {
+ "epoch": 0.48889653985195386,
+ "grad_norm": 23.535098442619628,
+ "learning_rate": 5.692425401683244e-07,
+ "logits/chosen": -0.22684970498085022,
+ "logits/rejected": -0.1970938742160797,
+ "logps/chosen": -197.89776611328125,
+ "logps/rejected": -206.56236267089844,
+ "loss": 0.5411,
+ "rewards/accuracies": 0.7359374761581421,
+ "rewards/chosen": -0.3052551746368408,
+ "rewards/margins": 0.689602255821228,
+ "rewards/rejected": -0.9948574304580688,
+ "step": 710
+ },
+ {
+ "epoch": 0.4957824066104321,
+ "grad_norm": 22.76862056281205,
+ "learning_rate": 5.615914307574598e-07,
+ "logits/chosen": -0.2701479494571686,
+ "logits/rejected": -0.2529926002025604,
+ "logps/chosen": -205.52587890625,
+ "logps/rejected": -217.68336486816406,
+ "loss": 0.5499,
+ "rewards/accuracies": 0.731249988079071,
+ "rewards/chosen": -0.3150857090950012,
+ "rewards/margins": 0.6676312685012817,
+ "rewards/rejected": -0.9827169179916382,
+ "step": 720
+ },
+ {
+ "epoch": 0.5026682733689103,
+ "grad_norm": 24.11490022902473,
+ "learning_rate": 5.539403213465952e-07,
+ "logits/chosen": -0.26072609424591064,
+ "logits/rejected": -0.24569085240364075,
+ "logps/chosen": -206.5780792236328,
+ "logps/rejected": -214.70892333984375,
+ "loss": 0.5664,
+ "rewards/accuracies": 0.7312500476837158,
+ "rewards/chosen": -0.34358713030815125,
+ "rewards/margins": 0.5998888611793518,
+ "rewards/rejected": -0.9434760212898254,
+ "step": 730
+ },
+ {
+ "epoch": 0.5095541401273885,
+ "grad_norm": 24.667914885216494,
+ "learning_rate": 5.462892119357306e-07,
+ "logits/chosen": -0.2955350875854492,
+ "logits/rejected": -0.267596960067749,
+ "logps/chosen": -204.7119140625,
+ "logps/rejected": -219.7845916748047,
+ "loss": 0.6024,
+ "rewards/accuracies": 0.6968750357627869,
+ "rewards/chosen": -0.35112637281417847,
+ "rewards/margins": 0.5716503262519836,
+ "rewards/rejected": -0.9227766394615173,
+ "step": 740
+ },
+ {
+ "epoch": 0.5164400068858668,
+ "grad_norm": 26.70861098025808,
+ "learning_rate": 5.38638102524866e-07,
+ "logits/chosen": -0.27651071548461914,
+ "logits/rejected": -0.2475259155035019,
+ "logps/chosen": -197.40142822265625,
+ "logps/rejected": -209.98281860351562,
+ "loss": 0.564,
+ "rewards/accuracies": 0.71875,
+ "rewards/chosen": -0.2942546308040619,
+ "rewards/margins": 0.5781306028366089,
+ "rewards/rejected": -0.8723852038383484,
+ "step": 750
+ },
+ {
+ "epoch": 0.523325873644345,
+ "grad_norm": 22.606570297594942,
+ "learning_rate": 5.309869931140015e-07,
+ "logits/chosen": -0.3192262053489685,
+ "logits/rejected": -0.2856769859790802,
+ "logps/chosen": -198.66526794433594,
+ "logps/rejected": -209.6296844482422,
+ "loss": 0.5744,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.33672210574150085,
+ "rewards/margins": 0.5660472512245178,
+ "rewards/rejected": -0.9027693271636963,
+ "step": 760
+ },
+ {
+ "epoch": 0.5302117404028232,
+ "grad_norm": 20.35249313890797,
+ "learning_rate": 5.233358837031369e-07,
+ "logits/chosen": -0.28755855560302734,
+ "logits/rejected": -0.23860663175582886,
+ "logps/chosen": -206.0916290283203,
+ "logps/rejected": -218.12562561035156,
+ "loss": 0.5347,
+ "rewards/accuracies": 0.7093750238418579,
+ "rewards/chosen": -0.39329051971435547,
+ "rewards/margins": 0.6772298216819763,
+ "rewards/rejected": -1.0705204010009766,
+ "step": 770
+ },
+ {
+ "epoch": 0.5370976071613014,
+ "grad_norm": 23.98875613027924,
+ "learning_rate": 5.156847742922723e-07,
+ "logits/chosen": -0.29115819931030273,
+ "logits/rejected": -0.2669009566307068,
+ "logps/chosen": -195.95689392089844,
+ "logps/rejected": -204.91221618652344,
+ "loss": 0.5347,
+ "rewards/accuracies": 0.7312500476837158,
+ "rewards/chosen": -0.3034020662307739,
+ "rewards/margins": 0.6618468165397644,
+ "rewards/rejected": -0.9652489423751831,
+ "step": 780
+ },
+ {
+ "epoch": 0.5439834739197796,
+ "grad_norm": 21.889026312300274,
+ "learning_rate": 5.080336648814077e-07,
+ "logits/chosen": -0.2898753881454468,
+ "logits/rejected": -0.26701635122299194,
+ "logps/chosen": -197.73936462402344,
+ "logps/rejected": -207.7303924560547,
+ "loss": 0.5585,
+ "rewards/accuracies": 0.7203124761581421,
+ "rewards/chosen": -0.37752565741539,
+ "rewards/margins": 0.6297482252120972,
+ "rewards/rejected": -1.0072739124298096,
+ "step": 790
+ },
+ {
+ "epoch": 0.5508693406782579,
+ "grad_norm": 22.78320300180032,
+ "learning_rate": 5.003825554705431e-07,
+ "logits/chosen": -0.25664806365966797,
+ "logits/rejected": -0.2466067373752594,
+ "logps/chosen": -203.13661193847656,
+ "logps/rejected": -211.3241424560547,
+ "loss": 0.5629,
+ "rewards/accuracies": 0.7328124642372131,
+ "rewards/chosen": -0.3754059672355652,
+ "rewards/margins": 0.6045074462890625,
+ "rewards/rejected": -0.9799134731292725,
+ "step": 800
+ },
+ {
+ "epoch": 0.5577552074367361,
+ "grad_norm": 20.845027944826946,
+ "learning_rate": 4.927314460596787e-07,
+ "logits/chosen": -0.24083290994167328,
+ "logits/rejected": -0.20561712980270386,
+ "logps/chosen": -199.92298889160156,
+ "logps/rejected": -212.12783813476562,
+ "loss": 0.5331,
+ "rewards/accuracies": 0.7515624761581421,
+ "rewards/chosen": -0.33173495531082153,
+ "rewards/margins": 0.7252557873725891,
+ "rewards/rejected": -1.0569908618927002,
+ "step": 810
+ },
+ {
+ "epoch": 0.5646410741952144,
+ "grad_norm": 23.03440686836388,
+ "learning_rate": 4.850803366488141e-07,
+ "logits/chosen": -0.27781039476394653,
+ "logits/rejected": -0.25066977739334106,
+ "logps/chosen": -205.1341552734375,
+ "logps/rejected": -215.48867797851562,
+ "loss": 0.5524,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.4363483190536499,
+ "rewards/margins": 0.6705613732337952,
+ "rewards/rejected": -1.1069096326828003,
+ "step": 820
+ },
+ {
+ "epoch": 0.5715269409536925,
+ "grad_norm": 25.880904118103416,
+ "learning_rate": 4.774292272379495e-07,
+ "logits/chosen": -0.26057904958724976,
+ "logits/rejected": -0.25437307357788086,
+ "logps/chosen": -199.4198455810547,
+ "logps/rejected": -206.81875610351562,
+ "loss": 0.5671,
+ "rewards/accuracies": 0.7281249761581421,
+ "rewards/chosen": -0.4462214708328247,
+ "rewards/margins": 0.6281691193580627,
+ "rewards/rejected": -1.0743906497955322,
+ "step": 830
+ },
+ {
+ "epoch": 0.5784128077121707,
+ "grad_norm": 22.29449401198685,
+ "learning_rate": 4.697781178270849e-07,
+ "logits/chosen": -0.22187501192092896,
+ "logits/rejected": -0.1855536848306656,
+ "logps/chosen": -194.93341064453125,
+ "logps/rejected": -207.49209594726562,
+ "loss": 0.5342,
+ "rewards/accuracies": 0.7359375357627869,
+ "rewards/chosen": -0.32722094655036926,
+ "rewards/margins": 0.69352787733078,
+ "rewards/rejected": -1.0207487344741821,
+ "step": 840
+ },
+ {
+ "epoch": 0.585298674470649,
+ "grad_norm": 23.681302591674456,
+ "learning_rate": 4.621270084162203e-07,
+ "logits/chosen": -0.26163047552108765,
+ "logits/rejected": -0.2241986244916916,
+ "logps/chosen": -202.11473083496094,
+ "logps/rejected": -211.72190856933594,
+ "loss": 0.5385,
+ "rewards/accuracies": 0.753125011920929,
+ "rewards/chosen": -0.35107535123825073,
+ "rewards/margins": 0.6962921023368835,
+ "rewards/rejected": -1.0473673343658447,
+ "step": 850
+ },
+ {
+ "epoch": 0.5921845412291272,
+ "grad_norm": 24.089626973464824,
+ "learning_rate": 4.5447589900535577e-07,
+ "logits/chosen": -0.2115095555782318,
+ "logits/rejected": -0.1764950454235077,
+ "logps/chosen": -202.98338317871094,
+ "logps/rejected": -214.40467834472656,
+ "loss": 0.564,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.4069036841392517,
+ "rewards/margins": 0.6364681720733643,
+ "rewards/rejected": -1.0433719158172607,
+ "step": 860
+ },
+ {
+ "epoch": 0.5990704079876055,
+ "grad_norm": 19.882045733809733,
+ "learning_rate": 4.4682478959449117e-07,
+ "logits/chosen": -0.259220689535141,
+ "logits/rejected": -0.23780278861522675,
+ "logps/chosen": -198.67410278320312,
+ "logps/rejected": -212.25843811035156,
+ "loss": 0.5551,
1311
+ "rewards/accuracies": 0.7406250238418579,
1312
+ "rewards/chosen": -0.3562455475330353,
1313
+ "rewards/margins": 0.6455320119857788,
1314
+ "rewards/rejected": -1.0017775297164917,
1315
+ "step": 870
1316
+ },
1317
+ {
1318
+ "epoch": 0.6059562747460837,
1319
+ "grad_norm": 22.848542792283176,
1320
+ "learning_rate": 4.391736801836266e-07,
1321
+ "logits/chosen": -0.22383271157741547,
1322
+ "logits/rejected": -0.1870381087064743,
1323
+ "logps/chosen": -195.6481170654297,
1324
+ "logps/rejected": -214.82052612304688,
1325
+ "loss": 0.5571,
1326
+ "rewards/accuracies": 0.7046875357627869,
1327
+ "rewards/chosen": -0.3412778675556183,
1328
+ "rewards/margins": 0.667373538017273,
1329
+ "rewards/rejected": -1.0086513757705688,
1330
+ "step": 880
1331
+ },
1332
+ {
1333
+ "epoch": 0.6128421415045618,
1334
+ "grad_norm": 22.07328478798222,
1335
+ "learning_rate": 4.315225707727621e-07,
1336
+ "logits/chosen": -0.2479555606842041,
1337
+ "logits/rejected": -0.23453199863433838,
1338
+ "logps/chosen": -207.93699645996094,
1339
+ "logps/rejected": -213.48202514648438,
1340
+ "loss": 0.5519,
1341
+ "rewards/accuracies": 0.75,
1342
+ "rewards/chosen": -0.41722822189331055,
1343
+ "rewards/margins": 0.6200730800628662,
1344
+ "rewards/rejected": -1.0373013019561768,
1345
+ "step": 890
1346
+ },
1347
+ {
1348
+ "epoch": 0.6197280082630401,
1349
+ "grad_norm": 23.698155914649785,
1350
+ "learning_rate": 4.238714613618974e-07,
1351
+ "logits/chosen": -0.2827821969985962,
1352
+ "logits/rejected": -0.2659350335597992,
1353
+ "logps/chosen": -194.46591186523438,
1354
+ "logps/rejected": -204.5747528076172,
1355
+ "loss": 0.5094,
1356
+ "rewards/accuracies": 0.7406250238418579,
1357
+ "rewards/chosen": -0.34797102212905884,
1358
+ "rewards/margins": 0.7520918846130371,
1359
+ "rewards/rejected": -1.1000628471374512,
1360
+ "step": 900
1361
+ },
1362
+ {
1363
+ "epoch": 0.6266138750215183,
1364
+ "grad_norm": 23.820275610745107,
1365
+ "learning_rate": 4.162203519510329e-07,
1366
+ "logits/chosen": -0.30107322335243225,
1367
+ "logits/rejected": -0.2755378484725952,
1368
+ "logps/chosen": -193.82180786132812,
1369
+ "logps/rejected": -201.58070373535156,
1370
+ "loss": 0.5408,
1371
+ "rewards/accuracies": 0.7265625,
1372
+ "rewards/chosen": -0.36847078800201416,
1373
+ "rewards/margins": 0.6643326878547668,
1374
+ "rewards/rejected": -1.0328035354614258,
1375
+ "step": 910
1376
+ },
1377
+ {
1378
+ "epoch": 0.6334997417799966,
1379
+ "grad_norm": 25.18137423151907,
1380
+ "learning_rate": 4.085692425401683e-07,
1381
+ "logits/chosen": -0.2921736240386963,
1382
+ "logits/rejected": -0.27882224321365356,
1383
+ "logps/chosen": -205.14035034179688,
1384
+ "logps/rejected": -218.99960327148438,
1385
+ "loss": 0.5653,
1386
+ "rewards/accuracies": 0.7312500476837158,
1387
+ "rewards/chosen": -0.35154396295547485,
1388
+ "rewards/margins": 0.6647427678108215,
1389
+ "rewards/rejected": -1.0162867307662964,
1390
+ "step": 920
1391
+ },
1392
+ {
1393
+ "epoch": 0.6403856085384748,
1394
+ "grad_norm": 21.770256912198327,
1395
+ "learning_rate": 4.0091813312930373e-07,
1396
+ "logits/chosen": -0.2791680693626404,
1397
+ "logits/rejected": -0.26233184337615967,
1398
+ "logps/chosen": -197.62933349609375,
1399
+ "logps/rejected": -208.18617248535156,
1400
+ "loss": 0.5285,
1401
+ "rewards/accuracies": 0.729687511920929,
1402
+ "rewards/chosen": -0.320173054933548,
1403
+ "rewards/margins": 0.7681188583374023,
1404
+ "rewards/rejected": -1.088291883468628,
1405
+ "step": 930
1406
+ },
1407
+ {
1408
+ "epoch": 0.6472714752969531,
1409
+ "grad_norm": 20.768362506243623,
1410
+ "learning_rate": 3.932670237184392e-07,
1411
+ "logits/chosen": -0.3179224729537964,
1412
+ "logits/rejected": -0.28427645564079285,
1413
+ "logps/chosen": -205.0663299560547,
1414
+ "logps/rejected": -213.0223388671875,
1415
+ "loss": 0.5237,
1416
+ "rewards/accuracies": 0.7437500357627869,
1417
+ "rewards/chosen": -0.3529094159603119,
1418
+ "rewards/margins": 0.7018887996673584,
1419
+ "rewards/rejected": -1.0547982454299927,
1420
+ "step": 940
1421
+ },
1422
+ {
1423
+ "epoch": 0.6541573420554312,
1424
+ "grad_norm": 24.168246031231085,
1425
+ "learning_rate": 3.856159143075746e-07,
1426
+ "logits/chosen": -0.2807343602180481,
1427
+ "logits/rejected": -0.260441392660141,
1428
+ "logps/chosen": -204.5944366455078,
1429
+ "logps/rejected": -217.90736389160156,
1430
+ "loss": 0.5382,
1431
+ "rewards/accuracies": 0.745312511920929,
1432
+ "rewards/chosen": -0.33213984966278076,
1433
+ "rewards/margins": 0.7174164056777954,
1434
+ "rewards/rejected": -1.0495562553405762,
1435
+ "step": 950
1436
+ },
1437
+ {
1438
+ "epoch": 0.6610432088139094,
1439
+ "grad_norm": 20.29598172536042,
1440
+ "learning_rate": 3.7796480489671e-07,
1441
+ "logits/chosen": -0.28455644845962524,
1442
+ "logits/rejected": -0.25150707364082336,
1443
+ "logps/chosen": -199.77969360351562,
1444
+ "logps/rejected": -209.3079833984375,
1445
+ "loss": 0.5277,
1446
+ "rewards/accuracies": 0.7390625476837158,
1447
+ "rewards/chosen": -0.3357803523540497,
1448
+ "rewards/margins": 0.6916924715042114,
1449
+ "rewards/rejected": -1.0274728536605835,
1450
+ "step": 960
1451
+ },
1452
+ {
1453
+ "epoch": 0.6679290755723877,
1454
+ "grad_norm": 23.263000578519865,
1455
+ "learning_rate": 3.7031369548584544e-07,
1456
+ "logits/chosen": -0.3258031904697418,
1457
+ "logits/rejected": -0.3066006600856781,
1458
+ "logps/chosen": -198.5902099609375,
1459
+ "logps/rejected": -211.4296875,
1460
+ "loss": 0.5169,
1461
+ "rewards/accuracies": 0.7546875476837158,
1462
+ "rewards/chosen": -0.2921166718006134,
1463
+ "rewards/margins": 0.7654004693031311,
1464
+ "rewards/rejected": -1.057517170906067,
1465
+ "step": 970
1466
+ },
1467
+ {
1468
+ "epoch": 0.6748149423308659,
1469
+ "grad_norm": 27.58985651394188,
1470
+ "learning_rate": 3.6266258607498084e-07,
1471
+ "logits/chosen": -0.32159218192100525,
1472
+ "logits/rejected": -0.3140263557434082,
1473
+ "logps/chosen": -202.26385498046875,
1474
+ "logps/rejected": -217.04469299316406,
1475
+ "loss": 0.5366,
1476
+ "rewards/accuracies": 0.737500011920929,
1477
+ "rewards/chosen": -0.3014247417449951,
1478
+ "rewards/margins": 0.7427166700363159,
1479
+ "rewards/rejected": -1.044141411781311,
1480
+ "step": 980
1481
+ },
1482
+ {
1483
+ "epoch": 0.6817008090893442,
1484
+ "grad_norm": 23.116860359749147,
1485
+ "learning_rate": 3.550114766641163e-07,
1486
+ "logits/chosen": -0.2947474420070648,
1487
+ "logits/rejected": -0.26782095432281494,
1488
+ "logps/chosen": -205.06268310546875,
1489
+ "logps/rejected": -221.36981201171875,
1490
+ "loss": 0.5549,
1491
+ "rewards/accuracies": 0.7437500357627869,
1492
+ "rewards/chosen": -0.3489997684955597,
1493
+ "rewards/margins": 0.7195862531661987,
1494
+ "rewards/rejected": -1.0685861110687256,
1495
+ "step": 990
1496
+ },
1497
+ {
1498
+ "epoch": 0.6885866758478223,
1499
+ "grad_norm": 23.978575280673038,
1500
+ "learning_rate": 3.473603672532517e-07,
1501
+ "logits/chosen": -0.3905680477619171,
1502
+ "logits/rejected": -0.3640524446964264,
1503
+ "logps/chosen": -198.04745483398438,
1504
+ "logps/rejected": -208.67324829101562,
1505
+ "loss": 0.5436,
1506
+ "rewards/accuracies": 0.721875011920929,
1507
+ "rewards/chosen": -0.34358733892440796,
1508
+ "rewards/margins": 0.6835037469863892,
1509
+ "rewards/rejected": -1.0270910263061523,
1510
+ "step": 1000
1511
+ },
1512
+ {
1513
+ "epoch": 0.6954725426063005,
1514
+ "grad_norm": 20.189411791862387,
1515
+ "learning_rate": 3.3970925784238715e-07,
1516
+ "logits/chosen": -0.29821571707725525,
1517
+ "logits/rejected": -0.2687515318393707,
1518
+ "logps/chosen": -200.1770782470703,
1519
+ "logps/rejected": -216.76576232910156,
1520
+ "loss": 0.5122,
1521
+ "rewards/accuracies": 0.7468750476837158,
1522
+ "rewards/chosen": -0.3284381031990051,
1523
+ "rewards/margins": 0.7797881364822388,
1524
+ "rewards/rejected": -1.1082262992858887,
1525
+ "step": 1010
1526
+ },
1527
+ {
1528
+ "epoch": 0.7023584093647788,
1529
+ "grad_norm": 24.729161803473662,
1530
+ "learning_rate": 3.3205814843152255e-07,
1531
+ "logits/chosen": -0.35373279452323914,
1532
+ "logits/rejected": -0.30401962995529175,
1533
+ "logps/chosen": -199.4246826171875,
1534
+ "logps/rejected": -211.8179168701172,
1535
+ "loss": 0.5261,
1536
+ "rewards/accuracies": 0.7406250238418579,
1537
+ "rewards/chosen": -0.349697470664978,
1538
+ "rewards/margins": 0.7501545548439026,
1539
+ "rewards/rejected": -1.0998519659042358,
1540
+ "step": 1020
1541
+ },
1542
+ {
1543
+ "epoch": 0.709244276123257,
1544
+ "grad_norm": 19.89999565338996,
1545
+ "learning_rate": 3.2440703902065795e-07,
1546
+ "logits/chosen": -0.3316059112548828,
1547
+ "logits/rejected": -0.2792198657989502,
1548
+ "logps/chosen": -204.58319091796875,
1549
+ "logps/rejected": -224.2587127685547,
1550
+ "loss": 0.5143,
1551
+ "rewards/accuracies": 0.7562499642372131,
1552
+ "rewards/chosen": -0.3548116385936737,
1553
+ "rewards/margins": 0.8157339096069336,
1554
+ "rewards/rejected": -1.1705455780029297,
1555
+ "step": 1030
1556
+ },
1557
+ {
1558
+ "epoch": 0.7161301428817353,
1559
+ "grad_norm": 21.29752204371925,
1560
+ "learning_rate": 3.167559296097934e-07,
1561
+ "logits/chosen": -0.3314603567123413,
1562
+ "logits/rejected": -0.30693235993385315,
1563
+ "logps/chosen": -203.65330505371094,
1564
+ "logps/rejected": -214.09750366210938,
1565
+ "loss": 0.5347,
1566
+ "rewards/accuracies": 0.7484375238418579,
1567
+ "rewards/chosen": -0.37685102224349976,
1568
+ "rewards/margins": 0.7335286736488342,
1569
+ "rewards/rejected": -1.110379695892334,
1570
+ "step": 1040
1571
+ },
1572
+ {
1573
+ "epoch": 0.7230160096402135,
1574
+ "grad_norm": 25.358261156803007,
1575
+ "learning_rate": 3.091048201989288e-07,
1576
+ "logits/chosen": -0.3865748643875122,
1577
+ "logits/rejected": -0.3486027121543884,
1578
+ "logps/chosen": -204.2187957763672,
1579
+ "logps/rejected": -213.5407257080078,
1580
+ "loss": 0.5281,
1581
+ "rewards/accuracies": 0.7437499761581421,
1582
+ "rewards/chosen": -0.39082208275794983,
1583
+ "rewards/margins": 0.7356584668159485,
1584
+ "rewards/rejected": -1.1264805793762207,
1585
+ "step": 1050
1586
+ },
1587
+ {
1588
+ "epoch": 0.7299018763986916,
1589
+ "grad_norm": 26.050502019430347,
1590
+ "learning_rate": 3.0145371078806426e-07,
1591
+ "logits/chosen": -0.31970304250717163,
1592
+ "logits/rejected": -0.2877205014228821,
1593
+ "logps/chosen": -199.24700927734375,
1594
+ "logps/rejected": -211.90777587890625,
1595
+ "loss": 0.5556,
1596
+ "rewards/accuracies": 0.7328125238418579,
1597
+ "rewards/chosen": -0.36632290482521057,
1598
+ "rewards/margins": 0.7173328399658203,
1599
+ "rewards/rejected": -1.083655834197998,
1600
+ "step": 1060
1601
+ },
1602
+ {
1603
+ "epoch": 0.7367877431571699,
1604
+ "grad_norm": 26.846914766196015,
1605
+ "learning_rate": 2.938026013771997e-07,
1606
+ "logits/chosen": -0.3748086392879486,
1607
+ "logits/rejected": -0.32532060146331787,
1608
+ "logps/chosen": -204.9930419921875,
1609
+ "logps/rejected": -217.00918579101562,
1610
+ "loss": 0.518,
1611
+ "rewards/accuracies": 0.7437500357627869,
1612
+ "rewards/chosen": -0.32681968808174133,
1613
+ "rewards/margins": 0.7986001968383789,
1614
+ "rewards/rejected": -1.1254198551177979,
1615
+ "step": 1070
1616
+ },
1617
+ {
1618
+ "epoch": 0.7436736099156481,
1619
+ "grad_norm": 19.81791648317305,
1620
+ "learning_rate": 2.861514919663351e-07,
1621
+ "logits/chosen": -0.35734954476356506,
1622
+ "logits/rejected": -0.3137910068035126,
1623
+ "logps/chosen": -190.3447265625,
1624
+ "logps/rejected": -205.507568359375,
1625
+ "loss": 0.4957,
1626
+ "rewards/accuracies": 0.770312488079071,
1627
+ "rewards/chosen": -0.301634281873703,
1628
+ "rewards/margins": 0.8440784215927124,
1629
+ "rewards/rejected": -1.1457128524780273,
1630
+ "step": 1080
1631
+ },
1632
+ {
1633
+ "epoch": 0.7505594766741264,
1634
+ "grad_norm": 21.610848655147894,
1635
+ "learning_rate": 2.785003825554705e-07,
1636
+ "logits/chosen": -0.3268454074859619,
1637
+ "logits/rejected": -0.31099632382392883,
1638
+ "logps/chosen": -206.75418090820312,
1639
+ "logps/rejected": -215.38848876953125,
1640
+ "loss": 0.5508,
1641
+ "rewards/accuracies": 0.7328125238418579,
1642
+ "rewards/chosen": -0.4365284740924835,
1643
+ "rewards/margins": 0.7196505665779114,
1644
+ "rewards/rejected": -1.1561790704727173,
1645
+ "step": 1090
1646
+ },
1647
+ {
1648
+ "epoch": 0.7574453434326046,
1649
+ "grad_norm": 21.30323322387434,
1650
+ "learning_rate": 2.7084927314460597e-07,
1651
+ "logits/chosen": -0.3255113959312439,
1652
+ "logits/rejected": -0.2992578446865082,
1653
+ "logps/chosen": -192.91909790039062,
1654
+ "logps/rejected": -208.11151123046875,
1655
+ "loss": 0.5145,
1656
+ "rewards/accuracies": 0.753125011920929,
1657
+ "rewards/chosen": -0.3881222903728485,
1658
+ "rewards/margins": 0.8132278919219971,
1659
+ "rewards/rejected": -1.201350212097168,
1660
+ "step": 1100
1661
+ },
1662
+ {
1663
+ "epoch": 0.7643312101910829,
1664
+ "grad_norm": 20.135623041976924,
1665
+ "learning_rate": 2.6319816373374137e-07,
1666
+ "logits/chosen": -0.3734041750431061,
1667
+ "logits/rejected": -0.3278706967830658,
1668
+ "logps/chosen": -202.39517211914062,
1669
+ "logps/rejected": -215.2314910888672,
1670
+ "loss": 0.5493,
1671
+ "rewards/accuracies": 0.7265625596046448,
1672
+ "rewards/chosen": -0.3905448317527771,
1673
+ "rewards/margins": 0.7275549173355103,
1674
+ "rewards/rejected": -1.1180996894836426,
1675
+ "step": 1110
1676
+ },
1677
+ {
1678
+ "epoch": 0.771217076949561,
1679
+ "grad_norm": 22.016629785130043,
1680
+ "learning_rate": 2.555470543228768e-07,
1681
+ "logits/chosen": -0.4030528664588928,
1682
+ "logits/rejected": -0.3748015761375427,
1683
+ "logps/chosen": -199.6988983154297,
1684
+ "logps/rejected": -211.9576416015625,
1685
+ "loss": 0.5556,
1686
+ "rewards/accuracies": 0.715624988079071,
1687
+ "rewards/chosen": -0.3250424861907959,
1688
+ "rewards/margins": 0.724652111530304,
1689
+ "rewards/rejected": -1.0496946573257446,
1690
+ "step": 1120
1691
+ },
1692
+ {
1693
+ "epoch": 0.7781029437080392,
1694
+ "grad_norm": 23.41840157614551,
1695
+ "learning_rate": 2.4789594491201223e-07,
1696
+ "logits/chosen": -0.36145588755607605,
1697
+ "logits/rejected": -0.2941068410873413,
1698
+ "logps/chosen": -197.30921936035156,
1699
+ "logps/rejected": -212.53648376464844,
1700
+ "loss": 0.5247,
1701
+ "rewards/accuracies": 0.753125011920929,
1702
+ "rewards/chosen": -0.3182806372642517,
1703
+ "rewards/margins": 0.7811294794082642,
1704
+ "rewards/rejected": -1.099410057067871,
1705
+ "step": 1130
1706
+ },
1707
+ {
1708
+ "epoch": 0.7849888104665175,
1709
+ "grad_norm": 21.06304161826817,
1710
+ "learning_rate": 2.402448355011477e-07,
1711
+ "logits/chosen": -0.32696184515953064,
1712
+ "logits/rejected": -0.2999955117702484,
1713
+ "logps/chosen": -193.55752563476562,
1714
+ "logps/rejected": -213.47972106933594,
1715
+ "loss": 0.5388,
1716
+ "rewards/accuracies": 0.7437500357627869,
1717
+ "rewards/chosen": -0.3747640550136566,
1718
+ "rewards/margins": 0.7560409307479858,
1719
+ "rewards/rejected": -1.1308048963546753,
1720
+ "step": 1140
1721
+ },
1722
+ {
1723
+ "epoch": 0.7918746772249957,
1724
+ "grad_norm": 20.793097909497455,
1725
+ "learning_rate": 2.3259372609028308e-07,
1726
+ "logits/chosen": -0.36112964153289795,
1727
+ "logits/rejected": -0.3267067074775696,
1728
+ "logps/chosen": -203.5668182373047,
1729
+ "logps/rejected": -212.1382598876953,
1730
+ "loss": 0.5362,
1731
+ "rewards/accuracies": 0.7515624761581421,
1732
+ "rewards/chosen": -0.3771086037158966,
1733
+ "rewards/margins": 0.7114807367324829,
1734
+ "rewards/rejected": -1.0885894298553467,
1735
+ "step": 1150
1736
+ },
1737
+ {
1738
+ "epoch": 0.798760543983474,
1739
+ "grad_norm": 24.909405192079628,
1740
+ "learning_rate": 2.2494261667941848e-07,
1741
+ "logits/chosen": -0.34521499276161194,
1742
+ "logits/rejected": -0.32405370473861694,
1743
+ "logps/chosen": -194.30148315429688,
1744
+ "logps/rejected": -208.0577392578125,
1745
+ "loss": 0.5209,
1746
+ "rewards/accuracies": 0.7640625238418579,
1747
+ "rewards/chosen": -0.3107369542121887,
1748
+ "rewards/margins": 0.7539686560630798,
1749
+ "rewards/rejected": -1.0647056102752686,
1750
+ "step": 1160
1751
+ },
1752
+ {
1753
+ "epoch": 0.8056464107419521,
1754
+ "grad_norm": 25.5580619560939,
1755
+ "learning_rate": 2.1729150726855394e-07,
1756
+ "logits/chosen": -0.3413427770137787,
1757
+ "logits/rejected": -0.30263420939445496,
1758
+ "logps/chosen": -192.62452697753906,
1759
+ "logps/rejected": -201.20782470703125,
1760
+ "loss": 0.5456,
1761
+ "rewards/accuracies": 0.7406250238418579,
1762
+ "rewards/chosen": -0.3429095149040222,
1763
+ "rewards/margins": 0.7008405327796936,
1764
+ "rewards/rejected": -1.0437501668930054,
1765
+ "step": 1170
1766
+ },
1767
+ {
1768
+ "epoch": 0.8125322775004303,
1769
+ "grad_norm": 20.40074379527458,
1770
+ "learning_rate": 2.0964039785768936e-07,
1771
+ "logits/chosen": -0.3403051495552063,
1772
+ "logits/rejected": -0.3150518536567688,
1773
+ "logps/chosen": -202.23458862304688,
1774
+ "logps/rejected": -218.47067260742188,
1775
+ "loss": 0.5012,
1776
+ "rewards/accuracies": 0.7406250238418579,
1777
+ "rewards/chosen": -0.3068324327468872,
1778
+ "rewards/margins": 0.8549840450286865,
1779
+ "rewards/rejected": -1.1618163585662842,
1780
+ "step": 1180
1781
+ },
1782
+ {
1783
+ "epoch": 0.8194181442589086,
1784
+ "grad_norm": 24.7780771443421,
1785
+ "learning_rate": 2.019892884468248e-07,
1786
+ "logits/chosen": -0.3548561930656433,
1787
+ "logits/rejected": -0.33061498403549194,
1788
+ "logps/chosen": -198.79690551757812,
1789
+ "logps/rejected": -216.0151824951172,
1790
+ "loss": 0.5174,
1791
+ "rewards/accuracies": 0.7609375715255737,
1792
+ "rewards/chosen": -0.3230586349964142,
1793
+ "rewards/margins": 0.8606723546981812,
1794
+ "rewards/rejected": -1.1837310791015625,
1795
+ "step": 1190
1796
+ },
1797
+ {
1798
+ "epoch": 0.8263040110173868,
1799
+ "grad_norm": 26.180779145376622,
1800
+ "learning_rate": 1.943381790359602e-07,
1801
+ "logits/chosen": -0.3222372233867645,
1802
+ "logits/rejected": -0.29423391819000244,
1803
+ "logps/chosen": -205.35232543945312,
1804
+ "logps/rejected": -218.5619659423828,
1805
+ "loss": 0.5403,
1806
+ "rewards/accuracies": 0.714062511920929,
1807
+ "rewards/chosen": -0.4436132311820984,
1808
+ "rewards/margins": 0.730433464050293,
1809
+ "rewards/rejected": -1.1740467548370361,
1810
+ "step": 1200
1811
+ },
1812
+ {
1813
+ "epoch": 0.8331898777758651,
1814
+ "grad_norm": 24.835512380545932,
1815
+ "learning_rate": 1.8668706962509562e-07,
1816
+ "logits/chosen": -0.3160286545753479,
1817
+ "logits/rejected": -0.289877712726593,
1818
+ "logps/chosen": -198.57736206054688,
1819
+ "logps/rejected": -208.46429443359375,
1820
+ "loss": 0.5334,
1821
+ "rewards/accuracies": 0.7484375238418579,
1822
+ "rewards/chosen": -0.3664984703063965,
1823
+ "rewards/margins": 0.7671695947647095,
1824
+ "rewards/rejected": -1.133668065071106,
1825
+ "step": 1210
1826
+ },
1827
+ {
1828
+ "epoch": 0.8400757445343433,
1829
+ "grad_norm": 24.491015676047468,
1830
+ "learning_rate": 1.7903596021423107e-07,
1831
+ "logits/chosen": -0.31306660175323486,
1832
+ "logits/rejected": -0.2788822650909424,
1833
+ "logps/chosen": -202.64613342285156,
1834
+ "logps/rejected": -214.78054809570312,
1835
+ "loss": 0.5213,
1836
+ "rewards/accuracies": 0.7359374761581421,
1837
+ "rewards/chosen": -0.42141586542129517,
1838
+ "rewards/margins": 0.7737562656402588,
1839
+ "rewards/rejected": -1.1951720714569092,
1840
+ "step": 1220
1841
+ },
1842
+ {
1843
+ "epoch": 0.8469616112928214,
1844
+ "grad_norm": 24.059372649386674,
1845
+ "learning_rate": 1.7138485080336647e-07,
1846
+ "logits/chosen": -0.3656036853790283,
1847
+ "logits/rejected": -0.3244956135749817,
1848
+ "logps/chosen": -199.56884765625,
1849
+ "logps/rejected": -217.80772399902344,
1850
+ "loss": 0.493,
1851
+ "rewards/accuracies": 0.7718750238418579,
1852
+ "rewards/chosen": -0.325540691614151,
1853
+ "rewards/margins": 0.8619417548179626,
1854
+ "rewards/rejected": -1.187482476234436,
1855
+ "step": 1230
1856
+ },
1857
+ {
1858
+ "epoch": 0.8538474780512997,
1859
+ "grad_norm": 18.27717476339513,
1860
+ "learning_rate": 1.637337413925019e-07,
1861
+ "logits/chosen": -0.4079727828502655,
1862
+ "logits/rejected": -0.35690948367118835,
1863
+ "logps/chosen": -200.95855712890625,
1864
+ "logps/rejected": -218.0155029296875,
1865
+ "loss": 0.4972,
1866
+ "rewards/accuracies": 0.7671875357627869,
1867
+ "rewards/chosen": -0.36264702677726746,
1868
+ "rewards/margins": 0.8437535166740417,
1869
+ "rewards/rejected": -1.2064005136489868,
1870
+ "step": 1240
1871
+ },
1872
+ {
1873
+ "epoch": 0.8607333448097779,
1874
+ "grad_norm": 23.065530605405485,
1875
+ "learning_rate": 1.5608263198163733e-07,
1876
+ "logits/chosen": -0.33324557542800903,
1877
+ "logits/rejected": -0.3149731755256653,
1878
+ "logps/chosen": -197.61123657226562,
1879
+ "logps/rejected": -204.68450927734375,
1880
+ "loss": 0.5792,
1881
+ "rewards/accuracies": 0.7156250476837158,
1882
+ "rewards/chosen": -0.44249990582466125,
1883
+ "rewards/margins": 0.6359133720397949,
1884
+ "rewards/rejected": -1.0784132480621338,
1885
+ "step": 1250
1886
+ },
1887
+ {
1888
+ "epoch": 0.8676192115682562,
1889
+ "grad_norm": 20.73050366973397,
1890
+ "learning_rate": 1.4843152257077276e-07,
1891
+ "logits/chosen": -0.3171376883983612,
1892
+ "logits/rejected": -0.28074705600738525,
1893
+ "logps/chosen": -209.62078857421875,
1894
+ "logps/rejected": -221.52203369140625,
1895
+ "loss": 0.5382,
1896
+ "rewards/accuracies": 0.71875,
1897
+ "rewards/chosen": -0.41873699426651,
1898
+ "rewards/margins": 0.7794735431671143,
1899
+ "rewards/rejected": -1.198210597038269,
1900
+ "step": 1260
1901
+ },
1902
+ {
1903
+ "epoch": 0.8745050783267344,
1904
+ "grad_norm": 21.760277541218,
1905
+ "learning_rate": 1.4078041315990818e-07,
1906
+ "logits/chosen": -0.3449145257472992,
1907
+ "logits/rejected": -0.30704426765441895,
1908
+ "logps/chosen": -197.60946655273438,
1909
+ "logps/rejected": -218.5653076171875,
1910
+ "loss": 0.5103,
1911
+ "rewards/accuracies": 0.7515625357627869,
1912
+ "rewards/chosen": -0.3605595529079437,
1913
+ "rewards/margins": 0.8724721670150757,
1914
+ "rewards/rejected": -1.2330316305160522,
1915
+ "step": 1270
1916
+ },
1917
+ {
1918
+ "epoch": 0.8813909450852127,
1919
+ "grad_norm": 21.675216189688292,
1920
+ "learning_rate": 1.331293037490436e-07,
1921
+ "logits/chosen": -0.37944453954696655,
1922
+ "logits/rejected": -0.3386183977127075,
1923
+ "logps/chosen": -198.25186157226562,
1924
+ "logps/rejected": -213.14938354492188,
1925
+ "loss": 0.5217,
1926
+ "rewards/accuracies": 0.7484375238418579,
1927
+ "rewards/chosen": -0.3340380787849426,
1928
+ "rewards/margins": 0.793717086315155,
1929
+ "rewards/rejected": -1.127755045890808,
1930
+ "step": 1280
1931
+ },
1932
+ {
1933
+ "epoch": 0.8882768118436908,
1934
+ "grad_norm": 26.560486484637135,
1935
+ "learning_rate": 1.25478194338179e-07,
1936
+ "logits/chosen": -0.3314391076564789,
1937
+ "logits/rejected": -0.29684922099113464,
1938
+ "logps/chosen": -204.3308563232422,
1939
+ "logps/rejected": -218.03488159179688,
1940
+ "loss": 0.5456,
1941
+ "rewards/accuracies": 0.746874988079071,
1942
+ "rewards/chosen": -0.3798133134841919,
1943
+ "rewards/margins": 0.7324703335762024,
1944
+ "rewards/rejected": -1.112283706665039,
1945
+ "step": 1290
1946
+ },
1947
+ {
1948
+ "epoch": 0.895162678602169,
1949
+ "grad_norm": 21.343267028495998,
1950
+ "learning_rate": 1.1782708492731445e-07,
1951
+ "logits/chosen": -0.35801607370376587,
1952
+ "logits/rejected": -0.33859655261039734,
1953
+ "logps/chosen": -193.88063049316406,
1954
+ "logps/rejected": -212.57852172851562,
1955
+ "loss": 0.525,
1956
+ "rewards/accuracies": 0.745312511920929,
1957
+ "rewards/chosen": -0.402618944644928,
1958
+ "rewards/margins": 0.7918345928192139,
1959
+ "rewards/rejected": -1.1944535970687866,
1960
+ "step": 1300
1961
+ },
1962
+ {
1963
+ "epoch": 0.9020485453606473,
1964
+ "grad_norm": 23.511393076340966,
1965
+ "learning_rate": 1.1017597551644987e-07,
1966
+ "logits/chosen": -0.3655666708946228,
1967
+ "logits/rejected": -0.327680379152298,
1968
+ "logps/chosen": -200.72186279296875,
1969
+ "logps/rejected": -212.2158203125,
1970
+ "loss": 0.521,
1971
+ "rewards/accuracies": 0.7250000238418579,
1972
+ "rewards/chosen": -0.38576409220695496,
1973
+ "rewards/margins": 0.760851263999939,
1974
+ "rewards/rejected": -1.1466155052185059,
1975
+ "step": 1310
1976
+ },
1977
+ {
1978
+ "epoch": 0.9089344121191255,
1979
+ "grad_norm": 22.97590079047081,
1980
+ "learning_rate": 1.0252486610558531e-07,
1981
+ "logits/chosen": -0.34880805015563965,
1982
+ "logits/rejected": -0.3193954825401306,
1983
+ "logps/chosen": -204.40707397460938,
1984
+ "logps/rejected": -218.28848266601562,
1985
+ "loss": 0.5178,
1986
+ "rewards/accuracies": 0.745312511920929,
1987
+ "rewards/chosen": -0.38576388359069824,
1988
+ "rewards/margins": 0.8154311180114746,
1989
+ "rewards/rejected": -1.2011948823928833,
1990
+ "step": 1320
1991
+ },
1992
+ {
1993
+ "epoch": 0.9158202788776038,
1994
+ "grad_norm": 20.603754969383694,
1995
+ "learning_rate": 9.487375669472072e-08,
1996
+ "logits/chosen": -0.33260923624038696,
1997
+ "logits/rejected": -0.30312278866767883,
1998
+ "logps/chosen": -203.37368774414062,
1999
+ "logps/rejected": -215.08058166503906,
2000
+ "loss": 0.5512,
2001
+ "rewards/accuracies": 0.721875011920929,
2002
+ "rewards/chosen": -0.42205169796943665,
2003
+ "rewards/margins": 0.725147008895874,
2004
+ "rewards/rejected": -1.1471986770629883,
2005
+ "step": 1330
2006
+ },
2007
+ {
2008
+ "epoch": 0.9227061456360819,
2009
+ "grad_norm": 21.191512286508,
2010
+ "learning_rate": 8.722264728385616e-08,
2011
+ "logits/chosen": -0.317133367061615,
2012
+ "logits/rejected": -0.2866368293762207,
2013
+ "logps/chosen": -199.86302185058594,
2014
+ "logps/rejected": -214.34527587890625,
2015
+ "loss": 0.525,
2016
+ "rewards/accuracies": 0.7437500357627869,
2017
+ "rewards/chosen": -0.4098660349845886,
2018
+ "rewards/margins": 0.795417845249176,
2019
+ "rewards/rejected": -1.2052838802337646,
2020
+ "step": 1340
2021
+ },
2022
+ {
2023
+ "epoch": 0.9295920123945601,
2024
+ "grad_norm": 23.130990012985187,
2025
+ "learning_rate": 7.957153787299158e-08,
2026
+ "logits/chosen": -0.3709363043308258,
2027
+ "logits/rejected": -0.33735787868499756,
2028
+ "logps/chosen": -200.1358184814453,
2029
+ "logps/rejected": -213.65025329589844,
2030
+ "loss": 0.529,
2031
+ "rewards/accuracies": 0.7265625,
2032
+ "rewards/chosen": -0.35897552967071533,
2033
+ "rewards/margins": 0.7571591734886169,
2034
+ "rewards/rejected": -1.1161346435546875,
2035
+ "step": 1350
2036
+ },
2037
+ {
2038
+ "epoch": 0.9364778791530384,
2039
+ "grad_norm": 18.120466166668564,
2040
+ "learning_rate": 7.1920428462127e-08,
2041
+ "logits/chosen": -0.329379141330719,
2042
+ "logits/rejected": -0.27726149559020996,
2043
+ "logps/chosen": -199.76431274414062,
2044
+ "logps/rejected": -208.10348510742188,
2045
+ "loss": 0.4741,
2046
+ "rewards/accuracies": 0.770312488079071,
2047
+ "rewards/chosen": -0.3599850535392761,
2048
+ "rewards/margins": 0.8584945797920227,
2049
+ "rewards/rejected": -1.2184796333312988,
2050
+ "step": 1360
2051
+ },
2052
+ {
2053
+ "epoch": 0.9433637459115166,
2054
+ "grad_norm": 22.98023018579539,
2055
+ "learning_rate": 6.426931905126243e-08,
2056
+ "logits/chosen": -0.35116755962371826,
2057
+ "logits/rejected": -0.3177265524864197,
2058
+ "logps/chosen": -194.65065002441406,
2059
+ "logps/rejected": -209.42718505859375,
2060
+ "loss": 0.5188,
2061
+ "rewards/accuracies": 0.7515625357627869,
2062
+ "rewards/chosen": -0.3557957410812378,
2063
+ "rewards/margins": 0.79322749376297,
2064
+ "rewards/rejected": -1.1490232944488525,
2065
+ "step": 1370
2066
+ },
2067
+ {
2068
+ "epoch": 0.9502496126699949,
2069
+ "grad_norm": 20.3222208875411,
2070
+ "learning_rate": 5.661820964039785e-08,
2071
+ "logits/chosen": -0.28668609261512756,
2072
+ "logits/rejected": -0.2664375901222229,
2073
+ "logps/chosen": -202.85134887695312,
2074
+ "logps/rejected": -215.53598022460938,
2075
+ "loss": 0.5022,
2076
+ "rewards/accuracies": 0.7671874761581421,
2077
+ "rewards/chosen": -0.3684632480144501,
2078
+ "rewards/margins": 0.8430503010749817,
2079
+ "rewards/rejected": -1.2115135192871094,
2080
+ "step": 1380
2081
+ },
2082
+ {
2083
+ "epoch": 0.9571354794284731,
2084
+ "grad_norm": 24.983901569618467,
2085
+ "learning_rate": 4.896710022953328e-08,
2086
+ "logits/chosen": -0.32899972796440125,
2087
+ "logits/rejected": -0.27919813990592957,
2088
+ "logps/chosen": -199.9358367919922,
2089
+ "logps/rejected": -213.5103759765625,
2090
+ "loss": 0.5119,
2091
+ "rewards/accuracies": 0.753125011920929,
2092
+ "rewards/chosen": -0.364630788564682,
2093
+ "rewards/margins": 0.8253352642059326,
2094
+ "rewards/rejected": -1.1899659633636475,
2095
+ "step": 1390
2096
+ },
2097
+ {
2098
+ "epoch": 0.9640213461869512,
2099
+ "grad_norm": 18.554502970350672,
2100
+ "learning_rate": 4.1315990818668707e-08,
2101
+ "logits/chosen": -0.3384089469909668,
2102
+ "logits/rejected": -0.3053920269012451,
2103
+ "logps/chosen": -198.3701934814453,
2104
+ "logps/rejected": -205.8408203125,
2105
+ "loss": 0.5312,
2106
+ "rewards/accuracies": 0.745312511920929,
2107
+ "rewards/chosen": -0.3812635540962219,
2108
+ "rewards/margins": 0.7723795175552368,
2109
+ "rewards/rejected": -1.1536431312561035,
2110
+ "step": 1400
2111
+ },
2112
+ {
2113
+ "epoch": 0.9709072129454295,
2114
+ "grad_norm": 18.01675823590483,
2115
+ "learning_rate": 3.366488140780413e-08,
2116
+ "logits/chosen": -0.32437849044799805,
2117
+ "logits/rejected": -0.2919423580169678,
2118
+ "logps/chosen": -201.2761688232422,
2119
+ "logps/rejected": -218.30552673339844,
2120
+ "loss": 0.4874,
2121
+ "rewards/accuracies": 0.7718750238418579,
2122
+ "rewards/chosen": -0.37892454862594604,
2123
+ "rewards/margins": 0.9029428958892822,
2124
+ "rewards/rejected": -1.2818673849105835,
2125
+ "step": 1410
2126
+ },
2127
+ {
2128
+ "epoch": 0.9777930797039077,
2129
+ "grad_norm": 29.39318412255784,
2130
+ "learning_rate": 2.6013771996939555e-08,
2131
+ "logits/chosen": -0.3163904845714569,
2132
+ "logits/rejected": -0.29091766476631165,
2133
+ "logps/chosen": -204.334228515625,
2134
+ "logps/rejected": -215.79502868652344,
2135
+ "loss": 0.5022,
2136
+ "rewards/accuracies": 0.7671874761581421,
2137
+ "rewards/chosen": -0.38413873314857483,
2138
+ "rewards/margins": 0.827136754989624,
2139
+ "rewards/rejected": -1.2112754583358765,
2140
+ "step": 1420
2141
+ },
2142
+ {
2143
+ "epoch": 0.984678946462386,
2144
+ "grad_norm": 19.461173491088918,
2145
+ "learning_rate": 1.8362662586074982e-08,
2146
+ "logits/chosen": -0.3308340609073639,
2147
+ "logits/rejected": -0.30718377232551575,
2148
+ "logps/chosen": -198.67318725585938,
2149
+ "logps/rejected": -216.25709533691406,
2150
+ "loss": 0.4884,
2151
+ "rewards/accuracies": 0.778124988079071,
2152
+ "rewards/chosen": -0.3805069327354431,
2153
+ "rewards/margins": 0.8595027327537537,
2154
+ "rewards/rejected": -1.2400096654891968,
2155
+ "step": 1430
2156
+ },
2157
+ {
2158
+ "epoch": 0.9915648132208642,
2159
+ "grad_norm": 24.14268800406746,
2160
+ "learning_rate": 1.0711553175210405e-08,
2161
+ "logits/chosen": -0.321536123752594,
2162
+ "logits/rejected": -0.2926163375377655,
2163
+ "logps/chosen": -197.82186889648438,
2164
+ "logps/rejected": -209.32135009765625,
2165
+ "loss": 0.5198,
2166
+ "rewards/accuracies": 0.734375,
2167
+ "rewards/chosen": -0.3956286907196045,
2168
+ "rewards/margins": 0.800367534160614,
2169
+ "rewards/rejected": -1.1959962844848633,
2170
+ "step": 1440
2171
+ },
2172
+ {
2173
+ "epoch": 0.9984506799793424,
2174
+ "grad_norm": 19.57722185128953,
2175
+ "learning_rate": 3.06044376434583e-09,
2176
+ "logits/chosen": -0.36749356985092163,
2177
+ "logits/rejected": -0.3329501450061798,
2178
+ "logps/chosen": -197.41607666015625,
2179
+ "logps/rejected": -212.35079956054688,
2180
+ "loss": 0.4988,
2181
+ "rewards/accuracies": 0.7734375,
2182
+ "rewards/chosen": -0.3526759147644043,
2183
+ "rewards/margins": 0.8231874108314514,
2184
+ "rewards/rejected": -1.175863265991211,
2185
+ "step": 1450
2186
+ },
2187
+ {
2188
+ "epoch": 1.0,
2189
+ "step": 1453,
2190
+ "total_flos": 159648317243392.0,
2191
+ "train_loss": 0.5763244779045961,
2192
+ "train_runtime": 14193.5309,
2193
+ "train_samples_per_second": 6.548,
2194
+ "train_steps_per_second": 0.102
2195
+ }
2196
+ ],
2197
+ "logging_steps": 10,
2198
+ "max_steps": 1453,
2199
+ "num_input_tokens_seen": 0,
2200
+ "num_train_epochs": 1,
2201
+ "save_steps": 500,
2202
+ "stateful_callbacks": {
2203
+ "TrainerControl": {
2204
+ "args": {
2205
+ "should_epoch_stop": false,
2206
+ "should_evaluate": false,
2207
+ "should_log": false,
2208
+ "should_save": true,
2209
+ "should_training_stop": true
2210
+ },
2211
+ "attributes": {}
2212
+ }
2213
+ },
2214
+ "total_flos": 159648317243392.0,
2215
+ "train_batch_size": 1,
2216
+ "trial_name": null,
2217
+ "trial_params": null
2218
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2563263dce28d47cd685ad855ed3282ac27a6982b6180e6ce41b1a30b345336
+ size 7544
training_loss.png ADDED
training_rewards_accuracies.png ADDED