davidanugraha committed on
Commit 2ed6fed · verified · 1 parent: f20f47b

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+---
+library_name: transformers
+license: other
+base_model: meta-llama/Llama-3.2-3B-Instruct
+tags:
+- llama-factory
+- full
+- generated_from_trainer
+model-index:
+- name: helpsteer3_llama32_3b_dpo_nemotron_ml
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# helpsteer3_llama32_3b_dpo_nemotron_ml
+
+This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_nemotron_ml dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 1e-06
+- train_batch_size: 1
+- eval_batch_size: 8
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 4
+- gradient_accumulation_steps: 16
+- total_train_batch_size: 64
+- total_eval_batch_size: 32
+- optimizer: ADAMW_TORCH with betas=(0.9, 0.999) and epsilon=1e-08 (no additional optimizer arguments)
+- lr_scheduler_type: linear
+- lr_scheduler_warmup_ratio: 0.1
+- num_epochs: 1.0
+
+### Training results
+
+
+
+### Framework versions
+
+- Transformers 4.52.4
+- Pytorch 2.6.0
+- Datasets 3.6.0
+- Tokenizers 0.21.1
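The card's usage sections are still placeholders, so here is a minimal loading sketch. It assumes the standard transformers API for a Llama-style checkpoint; the model path is a placeholder, not taken from the card. Note the effective batch size follows directly from the hyperparameters above: 1 per device × 4 devices × 16 accumulation steps = 64.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder path: point this at wherever this checkpoint lives.
MODEL_PATH = "path/to/helpsteer3_llama32_3b_dpo_nemotron_ml"

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
# config.json ships torch_dtype bfloat16, so load in that precision.
model = AutoModelForCausalLM.from_pretrained(MODEL_PATH, torch_dtype=torch.bfloat16)
```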
all_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 1.0,
+  "total_flos": 160109785841664.0,
+  "train_loss": 0.5976956401903605,
+  "train_runtime": 14200.9584,
+  "train_samples_per_second": 6.554,
+  "train_steps_per_second": 0.102
+}
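These throughput figures are internally consistent with the README's effective batch size of 64; a quick arithmetic check using only the values above:

```python
train_runtime = 14200.9584          # seconds
samples_per_s = 6.554
steps_per_s = 0.102

total_samples = samples_per_s * train_runtime   # ~93,073 samples
total_steps = steps_per_s * train_runtime       # ~1,449 optimizer steps
print(round(total_samples / total_steps))       # ~64, the effective batch size
```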
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+{{- bos_token }}
+{%- if custom_tools is defined %}
+    {%- set tools = custom_tools %}
+{%- endif %}
+{%- if not tools_in_user_message is defined %}
+    {%- set tools_in_user_message = true %}
+{%- endif %}
+{%- if not date_string is defined %}
+    {%- if strftime_now is defined %}
+        {%- set date_string = strftime_now("%d %b %Y") %}
+    {%- else %}
+        {%- set date_string = "26 Jul 2024" %}
+    {%- endif %}
+{%- endif %}
+{%- if not tools is defined %}
+    {%- set tools = none %}
+{%- endif %}
+
+{#- This block extracts the system message, so we can slot it into the right place. #}
+{%- if messages[0]['role'] == 'system' %}
+    {%- set system_message = messages[0]['content']|trim %}
+    {%- set messages = messages[1:] %}
+{%- else %}
+    {%- set system_message = "" %}
+{%- endif %}
+
+{#- System message #}
+{{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+{%- if tools is not none %}
+    {{- "Environment: ipython\n" }}
+{%- endif %}
+{{- "Cutting Knowledge Date: December 2023\n" }}
+{{- "Today Date: " + date_string + "\n\n" }}
+{%- if tools is not none and not tools_in_user_message %}
+    {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+    {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+    {{- "Do not use variables.\n\n" }}
+    {%- for t in tools %}
+        {{- t | tojson(indent=4) }}
+        {{- "\n\n" }}
+    {%- endfor %}
+{%- endif %}
+{{- system_message }}
+{{- "<|eot_id|>" }}
+
+{#- Custom tools are passed in a user message with some extra guidance #}
+{%- if tools_in_user_message and not tools is none %}
+    {#- Extract the first user message so we can plug it in here #}
+    {%- if messages | length != 0 %}
+        {%- set first_user_message = messages[0]['content']|trim %}
+        {%- set messages = messages[1:] %}
+    {%- else %}
+        {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+    {%- endif %}
+    {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+    {{- "Given the following functions, please respond with a JSON for a function call " }}
+    {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+    {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+    {{- "Do not use variables.\n\n" }}
+    {%- for t in tools %}
+        {{- t | tojson(indent=4) }}
+        {{- "\n\n" }}
+    {%- endfor %}
+    {{- first_user_message + "<|eot_id|>"}}
+{%- endif %}
+
+{%- for message in messages %}
+    {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+        {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+    {%- elif 'tool_calls' in message %}
+        {%- if not message.tool_calls|length == 1 %}
+            {{- raise_exception("This model only supports single tool-calls at once!") }}
+        {%- endif %}
+        {%- set tool_call = message.tool_calls[0].function %}
+        {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+        {{- '{"name": "' + tool_call.name + '", ' }}
+        {{- '"parameters": ' }}
+        {{- tool_call.arguments | tojson }}
+        {{- "}" }}
+        {{- "<|eot_id|>" }}
+    {%- elif message.role == "tool" or message.role == "ipython" %}
+        {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+        {%- if message.content is mapping or message.content is iterable %}
+            {{- message.content | tojson }}
+        {%- else %}
+            {{- message.content }}
+        {%- endif %}
+        {{- "<|eot_id|>" }}
+    {%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+    {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+{%- endif %}
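This matches the stock Llama 3.x tool-calling template; tokenizer.apply_chat_template is what renders it. A minimal sketch, assuming the tokenizer loaded earlier:

```python
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize Git LFS in one sentence."},
]

# Renders the template above: a system header carrying the date line, each
# turn wrapped in <|start_header_id|>...<|eot_id|>, and a trailing open
# assistant header because add_generation_prompt=True.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```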
config.json ADDED
@@ -0,0 +1,39 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 24,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 32.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.52.4",
+  "use_cache": false,
+  "vocab_size": 128256
+}
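Some of the shape fields are redundant and can be cross-checked; a small sanity sketch using only values from the JSON above:

```python
hidden_size, num_heads, num_kv_heads = 3072, 24, 8

# head_dim (128) is just hidden_size / num_attention_heads.
assert hidden_size // num_heads == 128

# Grouped-query attention: 24 query heads share 8 KV heads, 3 per group.
assert num_heads % num_kv_heads == 0
print(num_heads // num_kv_heads)  # -> 3
```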
generation_config.json ADDED
@@ -0,0 +1,12 @@
+{
+  "bos_token_id": 128000,
+  "do_sample": true,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.52.4"
+}
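model.generate picks these defaults up automatically from generation_config.json; a minimal sketch reusing the model and prompt from the earlier snippets (max_new_tokens is our own choice, not part of the config):

```python
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

# Sampling uses the defaults above (do_sample=True, temperature=0.6,
# top_p=0.9) and stops on any of the three eos token ids.
output_ids = model.generate(**inputs, max_new_tokens=256)
new_tokens = output_ids[0][inputs["input_ids"].shape[1]:]
print(tokenizer.decode(new_tokens, skip_special_tokens=True))
```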
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d6de24935ae1a84fa7c563e800ae6df3fcafe12ff9c49f962807cd216fe35ec
+size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:579cb6c398451aea4f437d370cdccf53075178357406ee78fa6183df846ca8fb
+size 1459745184
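Each .bin entry above is a Git LFS pointer file, not the weights themselves; the three key/value lines are the entire format. A small parsing sketch (standard library only, pointer text copied from the first shard above):

```python
POINTER = """version https://git-lfs.github.com/spec/v1
oid sha256:3d6de24935ae1a84fa7c563e800ae6df3fcafe12ff9c49f962807cd216fe35ec
size 4965841415"""

# Each line is "key value"; split on the first space.
fields = dict(line.split(" ", 1) for line in POINTER.splitlines())
print(fields["oid"], int(fields["size"]))

# The two shards total roughly 6.4 GB, consistent with a ~3.2B-parameter
# model stored in bfloat16 (2 bytes per parameter).
```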
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+{
+  "metadata": {
+    "total_size": 6425499648
+  },
+  "weight_map": {
+    "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+    "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+    "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+  }
+}
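The index is what lets transformers load the checkpoint shard by shard; a sketch of the lookup it performs, assuming the file has been downloaded locally:

```python
import json

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

# Layer 20 straddles the shard boundary: its attention and gate/up
# projections sit in shard 1, while down_proj and its norms sit in shard 2.
wm = index["weight_map"]
print(wm["model.layers.20.mlp.gate_proj.weight"])  # ...00001-of-00002.bin
print(wm["model.layers.20.mlp.down_proj.weight"])  # ...00002-of-00002.bin

# All shard files needed to rebuild the full state dict:
print(sorted(set(wm.values())))
```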
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<|eom_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": {
+    "content": "<|begin_of_text|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|eot_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|eot_id|>"
+}
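Note that pad_token is the same string as eos_token, which is common for Llama fine-tunes: padding is then distinguished by the attention mask rather than by a dedicated pad id. A quick check, assuming the tokenizer loaded earlier:

```python
# Both resolve to <|eot_id|> (id 128009), so attention_mask, not the
# token id, is what separates padding from a real end of turn.
assert tokenizer.pad_token == tokenizer.eos_token == "<|eot_id|>"
print(tokenizer.pad_token_id)  # 128009
```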
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
+{
+  "added_tokens_decoder": {
+    "128000": {"content": "<|begin_of_text|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128001": {"content": "<|end_of_text|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128002": {"content": "<|reserved_special_token_0|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128003": {"content": "<|reserved_special_token_1|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128004": {"content": "<|finetune_right_pad_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128005": {"content": "<|reserved_special_token_2|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128006": {"content": "<|start_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128007": {"content": "<|end_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128008": {"content": "<|eom_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128009": {"content": "<|eot_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128010": {"content": "<|python_tag|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128011": {"content": "<|reserved_special_token_3|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128012": {"content": "<|reserved_special_token_4|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128013": {"content": "<|reserved_special_token_5|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128014": {"content": "<|reserved_special_token_6|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128015": {"content": "<|reserved_special_token_7|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128016": {"content": "<|reserved_special_token_8|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128017": {"content": "<|reserved_special_token_9|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128018": {"content": "<|reserved_special_token_10|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128019": {"content": "<|reserved_special_token_11|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128020": {"content": "<|reserved_special_token_12|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128021": {"content": "<|reserved_special_token_13|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128022": {"content": "<|reserved_special_token_14|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128023": {"content": "<|reserved_special_token_15|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128024": {"content": "<|reserved_special_token_16|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128025": {"content": "<|reserved_special_token_17|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128026": {"content": "<|reserved_special_token_18|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128027": {"content": "<|reserved_special_token_19|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128028": {"content": "<|reserved_special_token_20|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128029": {"content": "<|reserved_special_token_21|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128030": {"content": "<|reserved_special_token_22|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128031": {"content": "<|reserved_special_token_23|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128032": {"content": "<|reserved_special_token_24|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128033": {"content": "<|reserved_special_token_25|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128034": {"content": "<|reserved_special_token_26|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128035": {"content": "<|reserved_special_token_27|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128036": {"content": "<|reserved_special_token_28|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128037": {"content": "<|reserved_special_token_29|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128038": {"content": "<|reserved_special_token_30|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128039": {"content": "<|reserved_special_token_31|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128040": {"content": "<|reserved_special_token_32|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128041": {"content": "<|reserved_special_token_33|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128042": {"content": "<|reserved_special_token_34|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128043": {"content": "<|reserved_special_token_35|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128044": {"content": "<|reserved_special_token_36|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128045": {"content": "<|reserved_special_token_37|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128046": {"content": "<|reserved_special_token_38|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128047": {"content": "<|reserved_special_token_39|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128048": {"content": "<|reserved_special_token_40|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128049": {"content": "<|reserved_special_token_41|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128050": {"content": "<|reserved_special_token_42|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128051": {"content": "<|reserved_special_token_43|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128052": {"content": "<|reserved_special_token_44|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128053": {"content": "<|reserved_special_token_45|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128054": {"content": "<|reserved_special_token_46|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128055": {"content": "<|reserved_special_token_47|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128056": {"content": "<|reserved_special_token_48|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128057": {"content": "<|reserved_special_token_49|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128058": {"content": "<|reserved_special_token_50|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128059": {"content": "<|reserved_special_token_51|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128060": {"content": "<|reserved_special_token_52|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128061": {"content": "<|reserved_special_token_53|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128062": {"content": "<|reserved_special_token_54|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128063": {"content": "<|reserved_special_token_55|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128064": {"content": "<|reserved_special_token_56|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128065": {"content": "<|reserved_special_token_57|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128066": {"content": "<|reserved_special_token_58|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128067": {"content": "<|reserved_special_token_59|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128068": {"content": "<|reserved_special_token_60|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128069": {"content": "<|reserved_special_token_61|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128070": {"content": "<|reserved_special_token_62|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128071": {"content": "<|reserved_special_token_63|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128072": {"content": "<|reserved_special_token_64|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128073": {"content": "<|reserved_special_token_65|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128074": {"content": "<|reserved_special_token_66|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128075": {"content": "<|reserved_special_token_67|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128076": {"content": "<|reserved_special_token_68|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128077": {"content": "<|reserved_special_token_69|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128078": {"content": "<|reserved_special_token_70|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128079": {"content": "<|reserved_special_token_71|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128080": {"content": "<|reserved_special_token_72|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128081": {"content": "<|reserved_special_token_73|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128082": {"content": "<|reserved_special_token_74|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128083": {"content": "<|reserved_special_token_75|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128084": {"content": "<|reserved_special_token_76|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128085": {"content": "<|reserved_special_token_77|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128086": {"content": "<|reserved_special_token_78|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128087": {"content": "<|reserved_special_token_79|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128088": {"content": "<|reserved_special_token_80|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128089": {"content": "<|reserved_special_token_81|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128090": {"content": "<|reserved_special_token_82|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128091": {"content": "<|reserved_special_token_83|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128092": {"content": "<|reserved_special_token_84|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128093": {"content": "<|reserved_special_token_85|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128094": {"content": "<|reserved_special_token_86|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128095": {"content": "<|reserved_special_token_87|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128096": {"content": "<|reserved_special_token_88|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128097": {"content": "<|reserved_special_token_89|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128098": {"content": "<|reserved_special_token_90|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128099": {"content": "<|reserved_special_token_91|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128100": {"content": "<|reserved_special_token_92|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128101": {"content": "<|reserved_special_token_93|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128102": {"content": "<|reserved_special_token_94|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128103": {"content": "<|reserved_special_token_95|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128104": {"content": "<|reserved_special_token_96|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128105": {"content": "<|reserved_special_token_97|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128106": {"content": "<|reserved_special_token_98|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128107": {"content": "<|reserved_special_token_99|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128108": {"content": "<|reserved_special_token_100|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128109": {"content": "<|reserved_special_token_101|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128110": {"content": "<|reserved_special_token_102|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128111": {"content": "<|reserved_special_token_103|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128112": {"content": "<|reserved_special_token_104|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128113": {"content": "<|reserved_special_token_105|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128114": {"content": "<|reserved_special_token_106|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128115": {"content": "<|reserved_special_token_107|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128116": {"content": "<|reserved_special_token_108|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128117": {"content": "<|reserved_special_token_109|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128118": {"content": "<|reserved_special_token_110|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128119": {"content": "<|reserved_special_token_111|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128120": {"content": "<|reserved_special_token_112|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128121": {"content": "<|reserved_special_token_113|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128122": {"content": "<|reserved_special_token_114|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128123": {"content": "<|reserved_special_token_115|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
+    "128124": {"content": "<|reserved_special_token_116|>", "lstrip": false, "normalized": false, "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
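
For orientation, here is a minimal sketch (not part of the uploaded files) of how the tokenizer_config.json settings above behave once the checkpoint is loaded. The repo id below is an assumption inferred from the model name; substitute the actual path if it differs.

```python
# Minimal sketch, assuming the checkpoint lives at the repo id below
# (inferred from the model name; adjust as needed).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "davidanugraha/helpsteer3_llama32_3b_dpo_nemotron_ml"
)

# bos/eos come straight from the config above:
print(tok.bos_token)        # <|begin_of_text|>
print(tok.eos_token)        # <|eot_id|>

# pad_token reuses the eos token, with right-side padding, so padded
# batches fill to the right after the final <|eot_id|>:
print(tok.pad_token, tok.padding_side)   # <|eot_id|> right

# model_max_length is 131072 (the Llama 3.2 context window):
print(tok.model_max_length)

# The reserved ids in added_tokens_decoder map to the tokens listed above,
# e.g. id 128255 -> <|reserved_special_token_247|>:
print(tok.convert_ids_to_tokens(128255))
```

Because the pad token reuses <|eot_id|>, padding is indistinguishable from the end-of-turn token by id alone; any downstream masking should rely on the attention mask rather than the pad id.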
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 160109785841664.0,
4
+ "train_loss": 0.5976956401903605,
5
+ "train_runtime": 14200.9584,
6
+ "train_samples_per_second": 6.554,
7
+ "train_steps_per_second": 0.102
8
+ }
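
As a quick, illustrative consistency check on train_results.json: dividing samples/sec by steps/sec recovers the effective batch size per optimizer step, and multiplying samples/sec by the runtime gives the total number of training pairs seen. The snippet assumes the file has been downloaded locally from this repo.

```python
# Illustrative sanity check on the reported training throughput numbers.
import json

with open("train_results.json") as f:
    results = json.load(f)

# Effective samples consumed per optimizer step:
effective_batch = results["train_samples_per_second"] / results["train_steps_per_second"]
print(round(effective_batch))   # 64

# Total samples seen over the single epoch:
total_samples = results["train_samples_per_second"] * results["train_runtime"]
print(round(total_samples))     # ~93,073
```

Both figures line up with trainer_log.jsonl below, which reports 1455 total optimizer steps (1455 × 64 ≈ 93,120 samples).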
trainer_log.jsonl ADDED
@@ -0,0 +1,146 @@
1
+ {"current_steps": 10, "total_steps": 1455, "loss": 0.6926, "accuracy": 0.4078124761581421, "lr": 6.164383561643836e-08, "epoch": 0.0068766923109984095, "percentage": 0.69, "elapsed_time": "0:01:40", "remaining_time": "4:00:58"}
2
+ {"current_steps": 20, "total_steps": 1455, "loss": 0.6939, "accuracy": 0.4843750298023224, "lr": 1.3013698630136985e-07, "epoch": 0.013753384621996819, "percentage": 1.37, "elapsed_time": "0:03:17", "remaining_time": "3:56:18"}
3
+ {"current_steps": 30, "total_steps": 1455, "loss": 0.6928, "accuracy": 0.5046875476837158, "lr": 1.9863013698630135e-07, "epoch": 0.02063007693299523, "percentage": 2.06, "elapsed_time": "0:04:57", "remaining_time": "3:55:54"}
4
+ {"current_steps": 40, "total_steps": 1455, "loss": 0.6936, "accuracy": 0.4859375059604645, "lr": 2.671232876712329e-07, "epoch": 0.027506769243993638, "percentage": 2.75, "elapsed_time": "0:06:36", "remaining_time": "3:53:51"}
5
+ {"current_steps": 50, "total_steps": 1455, "loss": 0.6931, "accuracy": 0.510937511920929, "lr": 3.3561643835616436e-07, "epoch": 0.03438346155499205, "percentage": 3.44, "elapsed_time": "0:08:15", "remaining_time": "3:52:03"}
6
+ {"current_steps": 60, "total_steps": 1455, "loss": 0.6926, "accuracy": 0.510937511920929, "lr": 4.041095890410959e-07, "epoch": 0.04126015386599046, "percentage": 4.12, "elapsed_time": "0:09:55", "remaining_time": "3:50:35"}
7
+ {"current_steps": 70, "total_steps": 1455, "loss": 0.6929, "accuracy": 0.504687488079071, "lr": 4.726027397260274e-07, "epoch": 0.048136846176988865, "percentage": 4.81, "elapsed_time": "0:11:32", "remaining_time": "3:48:27"}
8
+ {"current_steps": 80, "total_steps": 1455, "loss": 0.6907, "accuracy": 0.5718749761581421, "lr": 5.410958904109589e-07, "epoch": 0.055013538487987276, "percentage": 5.5, "elapsed_time": "0:13:09", "remaining_time": "3:46:17"}
9
+ {"current_steps": 90, "total_steps": 1455, "loss": 0.6918, "accuracy": 0.5234375, "lr": 6.095890410958904e-07, "epoch": 0.06189023079898569, "percentage": 6.19, "elapsed_time": "0:14:48", "remaining_time": "3:44:33"}
10
+ {"current_steps": 100, "total_steps": 1455, "loss": 0.6881, "accuracy": 0.5750000476837158, "lr": 6.78082191780822e-07, "epoch": 0.0687669231099841, "percentage": 6.87, "elapsed_time": "0:16:28", "remaining_time": "3:43:08"}
11
+ {"current_steps": 110, "total_steps": 1455, "loss": 0.6889, "accuracy": 0.5562500357627869, "lr": 7.465753424657533e-07, "epoch": 0.07564361542098251, "percentage": 7.56, "elapsed_time": "0:18:05", "remaining_time": "3:41:17"}
12
+ {"current_steps": 120, "total_steps": 1455, "loss": 0.686, "accuracy": 0.5703125, "lr": 8.150684931506849e-07, "epoch": 0.08252030773198092, "percentage": 8.25, "elapsed_time": "0:19:47", "remaining_time": "3:40:07"}
13
+ {"current_steps": 130, "total_steps": 1455, "loss": 0.6825, "accuracy": 0.5765625238418579, "lr": 8.835616438356164e-07, "epoch": 0.08939700004297933, "percentage": 8.93, "elapsed_time": "0:21:25", "remaining_time": "3:38:18"}
14
+ {"current_steps": 140, "total_steps": 1455, "loss": 0.6835, "accuracy": 0.596875011920929, "lr": 9.520547945205479e-07, "epoch": 0.09627369235397773, "percentage": 9.62, "elapsed_time": "0:23:02", "remaining_time": "3:36:23"}
15
+ {"current_steps": 150, "total_steps": 1455, "loss": 0.6789, "accuracy": 0.5828125476837158, "lr": 9.977081741787625e-07, "epoch": 0.10315038466497614, "percentage": 10.31, "elapsed_time": "0:24:42", "remaining_time": "3:35:00"}
16
+ {"current_steps": 160, "total_steps": 1455, "loss": 0.6785, "accuracy": 0.5843750238418579, "lr": 9.90068754774637e-07, "epoch": 0.11002707697597455, "percentage": 11.0, "elapsed_time": "0:26:21", "remaining_time": "3:33:18"}
17
+ {"current_steps": 170, "total_steps": 1455, "loss": 0.6697, "accuracy": 0.6453125476837158, "lr": 9.824293353705118e-07, "epoch": 0.11690376928697296, "percentage": 11.68, "elapsed_time": "0:27:57", "remaining_time": "3:31:21"}
18
+ {"current_steps": 180, "total_steps": 1455, "loss": 0.6673, "accuracy": 0.6093750596046448, "lr": 9.747899159663866e-07, "epoch": 0.12378046159797138, "percentage": 12.37, "elapsed_time": "0:29:35", "remaining_time": "3:29:38"}
19
+ {"current_steps": 190, "total_steps": 1455, "loss": 0.672, "accuracy": 0.6109375357627869, "lr": 9.671504965622611e-07, "epoch": 0.1306571539089698, "percentage": 13.06, "elapsed_time": "0:31:16", "remaining_time": "3:28:11"}
20
+ {"current_steps": 200, "total_steps": 1455, "loss": 0.6656, "accuracy": 0.614062488079071, "lr": 9.59511077158136e-07, "epoch": 0.1375338462199682, "percentage": 13.75, "elapsed_time": "0:32:55", "remaining_time": "3:26:34"}
21
+ {"current_steps": 210, "total_steps": 1455, "loss": 0.6553, "accuracy": 0.628125011920929, "lr": 9.518716577540107e-07, "epoch": 0.1444105385309666, "percentage": 14.43, "elapsed_time": "0:34:33", "remaining_time": "3:24:50"}
22
+ {"current_steps": 220, "total_steps": 1455, "loss": 0.6632, "accuracy": 0.6031250357627869, "lr": 9.442322383498854e-07, "epoch": 0.15128723084196502, "percentage": 15.12, "elapsed_time": "0:36:11", "remaining_time": "3:23:09"}
23
+ {"current_steps": 230, "total_steps": 1455, "loss": 0.6768, "accuracy": 0.606249988079071, "lr": 9.3659281894576e-07, "epoch": 0.15816392315296343, "percentage": 15.81, "elapsed_time": "0:37:49", "remaining_time": "3:21:27"}
24
+ {"current_steps": 240, "total_steps": 1455, "loss": 0.6448, "accuracy": 0.660937488079071, "lr": 9.289533995416348e-07, "epoch": 0.16504061546396184, "percentage": 16.49, "elapsed_time": "0:39:27", "remaining_time": "3:19:47"}
25
+ {"current_steps": 250, "total_steps": 1455, "loss": 0.672, "accuracy": 0.604687511920929, "lr": 9.213139801375095e-07, "epoch": 0.17191730777496025, "percentage": 17.18, "elapsed_time": "0:41:05", "remaining_time": "3:18:04"}
26
+ {"current_steps": 260, "total_steps": 1455, "loss": 0.6455, "accuracy": 0.6234374642372131, "lr": 9.136745607333842e-07, "epoch": 0.17879400008595867, "percentage": 17.87, "elapsed_time": "0:42:43", "remaining_time": "3:16:24"}
27
+ {"current_steps": 270, "total_steps": 1455, "loss": 0.6469, "accuracy": 0.6265625357627869, "lr": 9.060351413292589e-07, "epoch": 0.18567069239695708, "percentage": 18.56, "elapsed_time": "0:44:23", "remaining_time": "3:14:49"}
28
+ {"current_steps": 280, "total_steps": 1455, "loss": 0.637, "accuracy": 0.6578124761581421, "lr": 8.983957219251337e-07, "epoch": 0.19254738470795546, "percentage": 19.24, "elapsed_time": "0:46:01", "remaining_time": "3:13:09"}
29
+ {"current_steps": 290, "total_steps": 1455, "loss": 0.6617, "accuracy": 0.6078125238418579, "lr": 8.907563025210084e-07, "epoch": 0.19942407701895387, "percentage": 19.93, "elapsed_time": "0:47:39", "remaining_time": "3:11:28"}
30
+ {"current_steps": 300, "total_steps": 1455, "loss": 0.6393, "accuracy": 0.6484375, "lr": 8.83116883116883e-07, "epoch": 0.20630076932995228, "percentage": 20.62, "elapsed_time": "0:49:20", "remaining_time": "3:09:56"}
31
+ {"current_steps": 310, "total_steps": 1455, "loss": 0.6353, "accuracy": 0.660937488079071, "lr": 8.754774637127578e-07, "epoch": 0.2131774616409507, "percentage": 21.31, "elapsed_time": "0:50:58", "remaining_time": "3:08:15"}
32
+ {"current_steps": 320, "total_steps": 1455, "loss": 0.6592, "accuracy": 0.604687511920929, "lr": 8.678380443086325e-07, "epoch": 0.2200541539519491, "percentage": 21.99, "elapsed_time": "0:52:34", "remaining_time": "3:06:29"}
33
+ {"current_steps": 330, "total_steps": 1455, "loss": 0.6347, "accuracy": 0.653124988079071, "lr": 8.601986249045072e-07, "epoch": 0.22693084626294752, "percentage": 22.68, "elapsed_time": "0:54:13", "remaining_time": "3:04:50"}
34
+ {"current_steps": 340, "total_steps": 1455, "loss": 0.6401, "accuracy": 0.6484375, "lr": 8.52559205500382e-07, "epoch": 0.23380753857394593, "percentage": 23.37, "elapsed_time": "0:55:50", "remaining_time": "3:03:08"}
35
+ {"current_steps": 350, "total_steps": 1455, "loss": 0.6335, "accuracy": 0.671875, "lr": 8.449197860962567e-07, "epoch": 0.24068423088494434, "percentage": 24.05, "elapsed_time": "0:57:29", "remaining_time": "3:01:30"}
36
+ {"current_steps": 360, "total_steps": 1455, "loss": 0.6413, "accuracy": 0.6359375715255737, "lr": 8.372803666921313e-07, "epoch": 0.24756092319594275, "percentage": 24.74, "elapsed_time": "0:59:06", "remaining_time": "2:59:46"}
37
+ {"current_steps": 370, "total_steps": 1455, "loss": 0.6349, "accuracy": 0.651562511920929, "lr": 8.296409472880061e-07, "epoch": 0.25443761550694116, "percentage": 25.43, "elapsed_time": "1:00:45", "remaining_time": "2:58:08"}
38
+ {"current_steps": 380, "total_steps": 1455, "loss": 0.6278, "accuracy": 0.6578125357627869, "lr": 8.220015278838807e-07, "epoch": 0.2613143078179396, "percentage": 26.12, "elapsed_time": "1:02:22", "remaining_time": "2:56:28"}
39
+ {"current_steps": 390, "total_steps": 1455, "loss": 0.6311, "accuracy": 0.6484375, "lr": 8.143621084797555e-07, "epoch": 0.268191000128938, "percentage": 26.8, "elapsed_time": "1:04:00", "remaining_time": "2:54:47"}
40
+ {"current_steps": 400, "total_steps": 1455, "loss": 0.6176, "accuracy": 0.6703125238418579, "lr": 8.067226890756303e-07, "epoch": 0.2750676924399364, "percentage": 27.49, "elapsed_time": "1:05:38", "remaining_time": "2:53:09"}
41
+ {"current_steps": 410, "total_steps": 1455, "loss": 0.654, "accuracy": 0.6562500596046448, "lr": 7.990832696715049e-07, "epoch": 0.2819443847509348, "percentage": 28.18, "elapsed_time": "1:07:15", "remaining_time": "2:51:24"}
42
+ {"current_steps": 420, "total_steps": 1455, "loss": 0.6166, "accuracy": 0.6484375, "lr": 7.914438502673797e-07, "epoch": 0.2888210770619332, "percentage": 28.87, "elapsed_time": "1:08:53", "remaining_time": "2:49:45"}
43
+ {"current_steps": 430, "total_steps": 1455, "loss": 0.6121, "accuracy": 0.6812499761581421, "lr": 7.838044308632544e-07, "epoch": 0.29569776937293163, "percentage": 29.55, "elapsed_time": "1:10:33", "remaining_time": "2:48:11"}
44
+ {"current_steps": 440, "total_steps": 1455, "loss": 0.6397, "accuracy": 0.65625, "lr": 7.76165011459129e-07, "epoch": 0.30257446168393004, "percentage": 30.24, "elapsed_time": "1:12:12", "remaining_time": "2:46:34"}
45
+ {"current_steps": 450, "total_steps": 1455, "loss": 0.6273, "accuracy": 0.6640625, "lr": 7.685255920550038e-07, "epoch": 0.30945115399492845, "percentage": 30.93, "elapsed_time": "1:13:50", "remaining_time": "2:44:54"}
46
+ {"current_steps": 460, "total_steps": 1455, "loss": 0.6276, "accuracy": 0.6343749761581421, "lr": 7.608861726508786e-07, "epoch": 0.31632784630592686, "percentage": 31.62, "elapsed_time": "1:15:26", "remaining_time": "2:43:10"}
47
+ {"current_steps": 470, "total_steps": 1455, "loss": 0.6153, "accuracy": 0.6609375476837158, "lr": 7.532467532467532e-07, "epoch": 0.3232045386169253, "percentage": 32.3, "elapsed_time": "1:17:03", "remaining_time": "2:41:30"}
48
+ {"current_steps": 480, "total_steps": 1455, "loss": 0.6265, "accuracy": 0.667187511920929, "lr": 7.45607333842628e-07, "epoch": 0.3300812309279237, "percentage": 32.99, "elapsed_time": "1:18:44", "remaining_time": "2:39:55"}
49
+ {"current_steps": 490, "total_steps": 1455, "loss": 0.5946, "accuracy": 0.6828124523162842, "lr": 7.379679144385026e-07, "epoch": 0.3369579232389221, "percentage": 33.68, "elapsed_time": "1:20:22", "remaining_time": "2:38:17"}
50
+ {"current_steps": 500, "total_steps": 1455, "loss": 0.6099, "accuracy": 0.6796875, "lr": 7.303284950343773e-07, "epoch": 0.3438346155499205, "percentage": 34.36, "elapsed_time": "1:22:01", "remaining_time": "2:36:39"}
51
+ {"current_steps": 510, "total_steps": 1455, "loss": 0.6174, "accuracy": 0.660937488079071, "lr": 7.226890756302521e-07, "epoch": 0.3507113078609189, "percentage": 35.05, "elapsed_time": "1:24:04", "remaining_time": "2:35:47"}
52
+ {"current_steps": 520, "total_steps": 1455, "loss": 0.5978, "accuracy": 0.6890624761581421, "lr": 7.150496562261268e-07, "epoch": 0.35758800017191733, "percentage": 35.74, "elapsed_time": "1:25:42", "remaining_time": "2:34:06"}
53
+ {"current_steps": 530, "total_steps": 1455, "loss": 0.5866, "accuracy": 0.7171875238418579, "lr": 7.074102368220015e-07, "epoch": 0.36446469248291574, "percentage": 36.43, "elapsed_time": "1:27:21", "remaining_time": "2:32:27"}
54
+ {"current_steps": 540, "total_steps": 1455, "loss": 0.6108, "accuracy": 0.6937500238418579, "lr": 6.997708174178763e-07, "epoch": 0.37134138479391415, "percentage": 37.11, "elapsed_time": "1:28:57", "remaining_time": "2:30:44"}
55
+ {"current_steps": 550, "total_steps": 1455, "loss": 0.6143, "accuracy": 0.6703125238418579, "lr": 6.92131398013751e-07, "epoch": 0.37821807710491256, "percentage": 37.8, "elapsed_time": "1:30:34", "remaining_time": "2:29:01"}
56
+ {"current_steps": 560, "total_steps": 1455, "loss": 0.6138, "accuracy": 0.6750000715255737, "lr": 6.844919786096256e-07, "epoch": 0.3850947694159109, "percentage": 38.49, "elapsed_time": "1:32:11", "remaining_time": "2:27:20"}
57
+ {"current_steps": 570, "total_steps": 1455, "loss": 0.6166, "accuracy": 0.667187511920929, "lr": 6.768525592055004e-07, "epoch": 0.39197146172690933, "percentage": 39.18, "elapsed_time": "1:33:48", "remaining_time": "2:25:39"}
58
+ {"current_steps": 580, "total_steps": 1455, "loss": 0.5786, "accuracy": 0.6968749761581421, "lr": 6.69213139801375e-07, "epoch": 0.39884815403790774, "percentage": 39.86, "elapsed_time": "1:35:25", "remaining_time": "2:23:57"}
59
+ {"current_steps": 590, "total_steps": 1455, "loss": 0.6123, "accuracy": 0.6640625, "lr": 6.615737203972498e-07, "epoch": 0.40572484634890615, "percentage": 40.55, "elapsed_time": "1:37:00", "remaining_time": "2:22:13"}
60
+ {"current_steps": 600, "total_steps": 1455, "loss": 0.5818, "accuracy": 0.6875, "lr": 6.539343009931245e-07, "epoch": 0.41260153865990457, "percentage": 41.24, "elapsed_time": "1:38:36", "remaining_time": "2:20:30"}
61
+ {"current_steps": 610, "total_steps": 1455, "loss": 0.5839, "accuracy": 0.7078125476837158, "lr": 6.462948815889992e-07, "epoch": 0.419478230970903, "percentage": 41.92, "elapsed_time": "1:40:15", "remaining_time": "2:18:52"}
62
+ {"current_steps": 620, "total_steps": 1455, "loss": 0.6254, "accuracy": 0.6593750715255737, "lr": 6.386554621848739e-07, "epoch": 0.4263549232819014, "percentage": 42.61, "elapsed_time": "1:41:51", "remaining_time": "2:17:10"}
63
+ {"current_steps": 630, "total_steps": 1455, "loss": 0.6031, "accuracy": 0.7062500715255737, "lr": 6.310160427807486e-07, "epoch": 0.4332316155928998, "percentage": 43.3, "elapsed_time": "1:43:28", "remaining_time": "2:15:30"}
64
+ {"current_steps": 640, "total_steps": 1455, "loss": 0.5985, "accuracy": 0.7140624523162842, "lr": 6.233766233766233e-07, "epoch": 0.4401083079038982, "percentage": 43.99, "elapsed_time": "1:45:07", "remaining_time": "2:13:51"}
65
+ {"current_steps": 650, "total_steps": 1455, "loss": 0.599, "accuracy": 0.6578125357627869, "lr": 6.157372039724981e-07, "epoch": 0.4469850002148966, "percentage": 44.67, "elapsed_time": "1:46:42", "remaining_time": "2:12:09"}
66
+ {"current_steps": 660, "total_steps": 1455, "loss": 0.6192, "accuracy": 0.6859375238418579, "lr": 6.080977845683728e-07, "epoch": 0.45386169252589503, "percentage": 45.36, "elapsed_time": "1:48:18", "remaining_time": "2:10:27"}
67
+ {"current_steps": 670, "total_steps": 1455, "loss": 0.5748, "accuracy": 0.6937500238418579, "lr": 6.004583651642475e-07, "epoch": 0.46073838483689344, "percentage": 46.05, "elapsed_time": "1:49:54", "remaining_time": "2:08:46"}
68
+ {"current_steps": 680, "total_steps": 1455, "loss": 0.5868, "accuracy": 0.6843750476837158, "lr": 5.928189457601223e-07, "epoch": 0.46761507714789186, "percentage": 46.74, "elapsed_time": "1:51:31", "remaining_time": "2:07:05"}
69
+ {"current_steps": 690, "total_steps": 1455, "loss": 0.5795, "accuracy": 0.6875, "lr": 5.851795263559969e-07, "epoch": 0.47449176945889027, "percentage": 47.42, "elapsed_time": "1:53:07", "remaining_time": "2:05:25"}
70
+ {"current_steps": 700, "total_steps": 1455, "loss": 0.6131, "accuracy": 0.7015625238418579, "lr": 5.775401069518716e-07, "epoch": 0.4813684617698887, "percentage": 48.11, "elapsed_time": "1:54:41", "remaining_time": "2:03:42"}
71
+ {"current_steps": 710, "total_steps": 1455, "loss": 0.5876, "accuracy": 0.692187488079071, "lr": 5.699006875477463e-07, "epoch": 0.4882451540808871, "percentage": 48.8, "elapsed_time": "1:56:18", "remaining_time": "2:02:02"}
72
+ {"current_steps": 720, "total_steps": 1455, "loss": 0.5791, "accuracy": 0.7000000476837158, "lr": 5.622612681436211e-07, "epoch": 0.4951218463918855, "percentage": 49.48, "elapsed_time": "1:57:55", "remaining_time": "2:00:22"}
73
+ {"current_steps": 730, "total_steps": 1455, "loss": 0.7215, "accuracy": 0.6953125, "lr": 5.546218487394958e-07, "epoch": 0.5019985387028839, "percentage": 50.17, "elapsed_time": "1:59:32", "remaining_time": "1:58:43"}
74
+ {"current_steps": 740, "total_steps": 1455, "loss": 0.5916, "accuracy": 0.6875, "lr": 5.469824293353705e-07, "epoch": 0.5088752310138823, "percentage": 50.86, "elapsed_time": "2:01:08", "remaining_time": "1:57:03"}
75
+ {"current_steps": 750, "total_steps": 1455, "loss": 0.6025, "accuracy": 0.671875, "lr": 5.393430099312452e-07, "epoch": 0.5157519233248807, "percentage": 51.55, "elapsed_time": "2:02:46", "remaining_time": "1:55:24"}
76
+ {"current_steps": 760, "total_steps": 1455, "loss": 0.5554, "accuracy": 0.7125000357627869, "lr": 5.317035905271199e-07, "epoch": 0.5226286156358791, "percentage": 52.23, "elapsed_time": "2:04:23", "remaining_time": "1:53:45"}
77
+ {"current_steps": 770, "total_steps": 1455, "loss": 0.6091, "accuracy": 0.6609375476837158, "lr": 5.240641711229946e-07, "epoch": 0.5295053079468776, "percentage": 52.92, "elapsed_time": "2:05:58", "remaining_time": "1:52:04"}
78
+ {"current_steps": 780, "total_steps": 1455, "loss": 0.5884, "accuracy": 0.7109375, "lr": 5.164247517188694e-07, "epoch": 0.536382000257876, "percentage": 53.61, "elapsed_time": "2:07:34", "remaining_time": "1:50:24"}
79
+ {"current_steps": 790, "total_steps": 1455, "loss": 0.5752, "accuracy": 0.715624988079071, "lr": 5.08785332314744e-07, "epoch": 0.5432586925688744, "percentage": 54.3, "elapsed_time": "2:09:08", "remaining_time": "1:48:42"}
80
+ {"current_steps": 800, "total_steps": 1455, "loss": 0.5837, "accuracy": 0.7171875238418579, "lr": 5.011459129106188e-07, "epoch": 0.5501353848798728, "percentage": 54.98, "elapsed_time": "2:10:43", "remaining_time": "1:47:01"}
81
+ {"current_steps": 810, "total_steps": 1455, "loss": 0.5731, "accuracy": 0.684374988079071, "lr": 4.935064935064935e-07, "epoch": 0.5570120771908712, "percentage": 55.67, "elapsed_time": "2:12:19", "remaining_time": "1:45:21"}
82
+ {"current_steps": 820, "total_steps": 1455, "loss": 0.5683, "accuracy": 0.7140624523162842, "lr": 4.858670741023682e-07, "epoch": 0.5638887695018696, "percentage": 56.36, "elapsed_time": "2:13:55", "remaining_time": "1:43:42"}
83
+ {"current_steps": 830, "total_steps": 1455, "loss": 0.5698, "accuracy": 0.698437511920929, "lr": 4.782276546982429e-07, "epoch": 0.570765461812868, "percentage": 57.04, "elapsed_time": "2:15:31", "remaining_time": "1:42:02"}
84
+ {"current_steps": 840, "total_steps": 1455, "loss": 0.5546, "accuracy": 0.729687511920929, "lr": 4.705882352941176e-07, "epoch": 0.5776421541238664, "percentage": 57.73, "elapsed_time": "2:17:05", "remaining_time": "1:40:21"}
85
+ {"current_steps": 850, "total_steps": 1455, "loss": 0.5735, "accuracy": 0.7265625, "lr": 4.6294881588999233e-07, "epoch": 0.5845188464348648, "percentage": 58.42, "elapsed_time": "2:18:41", "remaining_time": "1:38:42"}
86
+ {"current_steps": 860, "total_steps": 1455, "loss": 0.5697, "accuracy": 0.706250011920929, "lr": 4.553093964858671e-07, "epoch": 0.5913955387458633, "percentage": 59.11, "elapsed_time": "2:20:16", "remaining_time": "1:37:03"}
87
+ {"current_steps": 870, "total_steps": 1455, "loss": 0.5685, "accuracy": 0.7234375476837158, "lr": 4.4766997708174176e-07, "epoch": 0.5982722310568617, "percentage": 59.79, "elapsed_time": "2:21:54", "remaining_time": "1:35:25"}
88
+ {"current_steps": 880, "total_steps": 1455, "loss": 0.5899, "accuracy": 0.6812500357627869, "lr": 4.4003055767761647e-07, "epoch": 0.6051489233678601, "percentage": 60.48, "elapsed_time": "2:23:33", "remaining_time": "1:33:48"}
89
+ {"current_steps": 890, "total_steps": 1455, "loss": 0.5723, "accuracy": 0.715624988079071, "lr": 4.3239113827349124e-07, "epoch": 0.6120256156788585, "percentage": 61.17, "elapsed_time": "2:25:10", "remaining_time": "1:32:10"}
90
+ {"current_steps": 900, "total_steps": 1455, "loss": 0.5815, "accuracy": 0.7140624523162842, "lr": 4.247517188693659e-07, "epoch": 0.6189023079898569, "percentage": 61.86, "elapsed_time": "2:26:48", "remaining_time": "1:30:31"}
91
+ {"current_steps": 910, "total_steps": 1455, "loss": 0.5716, "accuracy": 0.690625011920929, "lr": 4.171122994652406e-07, "epoch": 0.6257790003008553, "percentage": 62.54, "elapsed_time": "2:28:25", "remaining_time": "1:28:53"}
92
+ {"current_steps": 920, "total_steps": 1455, "loss": 0.5365, "accuracy": 0.734375, "lr": 4.094728800611154e-07, "epoch": 0.6326556926118537, "percentage": 63.23, "elapsed_time": "2:30:00", "remaining_time": "1:27:14"}
93
+ {"current_steps": 930, "total_steps": 1455, "loss": 0.532, "accuracy": 0.765625, "lr": 4.0183346065699004e-07, "epoch": 0.6395323849228521, "percentage": 63.92, "elapsed_time": "2:31:36", "remaining_time": "1:25:35"}
94
+ {"current_steps": 940, "total_steps": 1455, "loss": 0.5656, "accuracy": 0.7203125357627869, "lr": 3.9419404125286475e-07, "epoch": 0.6464090772338505, "percentage": 64.6, "elapsed_time": "2:33:13", "remaining_time": "1:23:56"}
95
+ {"current_steps": 950, "total_steps": 1455, "loss": 0.5528, "accuracy": 0.7171875238418579, "lr": 3.865546218487395e-07, "epoch": 0.653285769544849, "percentage": 65.29, "elapsed_time": "2:34:49", "remaining_time": "1:22:18"}
96
+ {"current_steps": 960, "total_steps": 1455, "loss": 0.5699, "accuracy": 0.7000000476837158, "lr": 3.789152024446142e-07, "epoch": 0.6601624618558474, "percentage": 65.98, "elapsed_time": "2:36:25", "remaining_time": "1:20:39"}
97
+ {"current_steps": 970, "total_steps": 1455, "loss": 0.5486, "accuracy": 0.7187500596046448, "lr": 3.712757830404889e-07, "epoch": 0.6670391541668458, "percentage": 66.67, "elapsed_time": "2:38:01", "remaining_time": "1:19:00"}
98
+ {"current_steps": 980, "total_steps": 1455, "loss": 0.575, "accuracy": 0.7078125476837158, "lr": 3.636363636363636e-07, "epoch": 0.6739158464778442, "percentage": 67.35, "elapsed_time": "2:39:38", "remaining_time": "1:17:22"}
99
+ {"current_steps": 990, "total_steps": 1455, "loss": 0.5489, "accuracy": 0.714062511920929, "lr": 3.5599694423223837e-07, "epoch": 0.6807925387888426, "percentage": 68.04, "elapsed_time": "2:41:15", "remaining_time": "1:15:44"}
100
+ {"current_steps": 1000, "total_steps": 1455, "loss": 0.5568, "accuracy": 0.7250000238418579, "lr": 3.4835752482811303e-07, "epoch": 0.687669231099841, "percentage": 68.73, "elapsed_time": "2:42:52", "remaining_time": "1:14:06"}
101
+ {"current_steps": 1010, "total_steps": 1455, "loss": 0.5783, "accuracy": 0.7125000357627869, "lr": 3.4071810542398775e-07, "epoch": 0.6945459234108394, "percentage": 69.42, "elapsed_time": "2:44:52", "remaining_time": "1:12:38"}
102
+ {"current_steps": 1020, "total_steps": 1455, "loss": 0.515, "accuracy": 0.7718750238418579, "lr": 3.330786860198625e-07, "epoch": 0.7014226157218378, "percentage": 70.1, "elapsed_time": "2:46:28", "remaining_time": "1:10:59"}
103
+ {"current_steps": 1030, "total_steps": 1455, "loss": 0.5566, "accuracy": 0.6984374523162842, "lr": 3.2543926661573717e-07, "epoch": 0.7082993080328363, "percentage": 70.79, "elapsed_time": "2:48:03", "remaining_time": "1:09:20"}
104
+ {"current_steps": 1040, "total_steps": 1455, "loss": 0.5525, "accuracy": 0.7218750715255737, "lr": 3.177998472116119e-07, "epoch": 0.7151760003438347, "percentage": 71.48, "elapsed_time": "2:49:41", "remaining_time": "1:07:42"}
105
+ {"current_steps": 1050, "total_steps": 1455, "loss": 0.5688, "accuracy": 0.7000000476837158, "lr": 3.1016042780748665e-07, "epoch": 0.7220526926548331, "percentage": 72.16, "elapsed_time": "2:51:15", "remaining_time": "1:06:03"}
106
+ {"current_steps": 1060, "total_steps": 1455, "loss": 0.5763, "accuracy": 0.7000000476837158, "lr": 3.025210084033613e-07, "epoch": 0.7289293849658315, "percentage": 72.85, "elapsed_time": "2:52:52", "remaining_time": "1:04:25"}
107
+ {"current_steps": 1070, "total_steps": 1455, "loss": 0.5514, "accuracy": 0.721875011920929, "lr": 2.9488158899923603e-07, "epoch": 0.7358060772768299, "percentage": 73.54, "elapsed_time": "2:54:28", "remaining_time": "1:02:46"}
108
+ {"current_steps": 1080, "total_steps": 1455, "loss": 0.57, "accuracy": 0.71875, "lr": 2.872421695951108e-07, "epoch": 0.7426827695878283, "percentage": 74.23, "elapsed_time": "2:56:03", "remaining_time": "1:01:07"}
+ {"current_steps": 1090, "total_steps": 1455, "loss": 0.5452, "accuracy": 0.7000000476837158, "lr": 2.7960275019098545e-07, "epoch": 0.7495594618988267, "percentage": 74.91, "elapsed_time": "2:57:40", "remaining_time": "0:59:29"}
+ {"current_steps": 1100, "total_steps": 1455, "loss": 0.545, "accuracy": 0.745312511920929, "lr": 2.7196333078686017e-07, "epoch": 0.7564361542098251, "percentage": 75.6, "elapsed_time": "2:59:17", "remaining_time": "0:57:51"}
+ {"current_steps": 1110, "total_steps": 1455, "loss": 0.5584, "accuracy": 0.7250000238418579, "lr": 2.6432391138273493e-07, "epoch": 0.7633128465208234, "percentage": 76.29, "elapsed_time": "3:00:53", "remaining_time": "0:56:13"}
+ {"current_steps": 1120, "total_steps": 1455, "loss": 0.5507, "accuracy": 0.7468750476837158, "lr": 2.5668449197860965e-07, "epoch": 0.7701895388318218, "percentage": 76.98, "elapsed_time": "3:02:31", "remaining_time": "0:54:35"}
+ {"current_steps": 1130, "total_steps": 1455, "loss": 0.5454, "accuracy": 0.7421875596046448, "lr": 2.490450725744843e-07, "epoch": 0.7770662311428203, "percentage": 77.66, "elapsed_time": "3:04:05", "remaining_time": "0:52:56"}
+ {"current_steps": 1140, "total_steps": 1455, "loss": 0.5633, "accuracy": 0.7250000238418579, "lr": 2.41405653170359e-07, "epoch": 0.7839429234538187, "percentage": 78.35, "elapsed_time": "3:05:41", "remaining_time": "0:51:18"}
+ {"current_steps": 1150, "total_steps": 1455, "loss": 0.549, "accuracy": 0.739062488079071, "lr": 2.3376623376623376e-07, "epoch": 0.7908196157648171, "percentage": 79.04, "elapsed_time": "3:07:18", "remaining_time": "0:49:40"}
+ {"current_steps": 1160, "total_steps": 1455, "loss": 0.5541, "accuracy": 0.714062511920929, "lr": 2.2612681436210848e-07, "epoch": 0.7976963080758155, "percentage": 79.73, "elapsed_time": "3:08:54", "remaining_time": "0:48:02"}
+ {"current_steps": 1170, "total_steps": 1455, "loss": 0.5306, "accuracy": 0.737500011920929, "lr": 2.184873949579832e-07, "epoch": 0.8045730003868139, "percentage": 80.41, "elapsed_time": "3:10:29", "remaining_time": "0:46:24"}
+ {"current_steps": 1180, "total_steps": 1455, "loss": 0.5633, "accuracy": 0.7171875238418579, "lr": 2.1084797555385788e-07, "epoch": 0.8114496926978123, "percentage": 81.1, "elapsed_time": "3:12:06", "remaining_time": "0:44:46"}
+ {"current_steps": 1190, "total_steps": 1455, "loss": 0.5521, "accuracy": 0.721875011920929, "lr": 2.0320855614973262e-07, "epoch": 0.8183263850088107, "percentage": 81.79, "elapsed_time": "3:13:43", "remaining_time": "0:43:08"}
+ {"current_steps": 1200, "total_steps": 1455, "loss": 0.5702, "accuracy": 0.7093750238418579, "lr": 1.9556913674560733e-07, "epoch": 0.8252030773198091, "percentage": 82.47, "elapsed_time": "3:15:21", "remaining_time": "0:41:30"}
+ {"current_steps": 1210, "total_steps": 1455, "loss": 0.543, "accuracy": 0.7593749761581421, "lr": 1.8792971734148204e-07, "epoch": 0.8320797696308075, "percentage": 83.16, "elapsed_time": "3:16:56", "remaining_time": "0:39:52"}
+ {"current_steps": 1220, "total_steps": 1455, "loss": 0.5449, "accuracy": 0.739062488079071, "lr": 1.8029029793735676e-07, "epoch": 0.838956461941806, "percentage": 83.85, "elapsed_time": "3:18:31", "remaining_time": "0:38:14"}
+ {"current_steps": 1230, "total_steps": 1455, "loss": 0.535, "accuracy": 0.7484375238418579, "lr": 1.7265087853323147e-07, "epoch": 0.8458331542528044, "percentage": 84.54, "elapsed_time": "3:20:09", "remaining_time": "0:36:36"}
+ {"current_steps": 1240, "total_steps": 1455, "loss": 0.5723, "accuracy": 0.7109375596046448, "lr": 1.6501145912910618e-07, "epoch": 0.8527098465638028, "percentage": 85.22, "elapsed_time": "3:21:46", "remaining_time": "0:34:59"}
+ {"current_steps": 1250, "total_steps": 1455, "loss": 0.5395, "accuracy": 0.745312511920929, "lr": 1.573720397249809e-07, "epoch": 0.8595865388748012, "percentage": 85.91, "elapsed_time": "3:23:21", "remaining_time": "0:33:21"}
+ {"current_steps": 1260, "total_steps": 1455, "loss": 0.5582, "accuracy": 0.7328125238418579, "lr": 1.4973262032085558e-07, "epoch": 0.8664632311857996, "percentage": 86.6, "elapsed_time": "3:24:58", "remaining_time": "0:31:43"}
+ {"current_steps": 1270, "total_steps": 1455, "loss": 0.5413, "accuracy": 0.7421875, "lr": 1.4209320091673032e-07, "epoch": 0.873339923496798, "percentage": 87.29, "elapsed_time": "3:26:35", "remaining_time": "0:30:05"}
+ {"current_steps": 1280, "total_steps": 1455, "loss": 0.5304, "accuracy": 0.7390625476837158, "lr": 1.3445378151260504e-07, "epoch": 0.8802166158077964, "percentage": 87.97, "elapsed_time": "3:28:12", "remaining_time": "0:28:27"}
+ {"current_steps": 1290, "total_steps": 1455, "loss": 0.5305, "accuracy": 0.7437500357627869, "lr": 1.2681436210847975e-07, "epoch": 0.8870933081187948, "percentage": 88.66, "elapsed_time": "3:29:49", "remaining_time": "0:26:50"}
+ {"current_steps": 1300, "total_steps": 1455, "loss": 0.5503, "accuracy": 0.7234375476837158, "lr": 1.1917494270435446e-07, "epoch": 0.8939700004297932, "percentage": 89.35, "elapsed_time": "3:31:27", "remaining_time": "0:25:12"}
+ {"current_steps": 1310, "total_steps": 1455, "loss": 0.5439, "accuracy": 0.721875011920929, "lr": 1.1153552330022918e-07, "epoch": 0.9008466927407917, "percentage": 90.03, "elapsed_time": "3:33:03", "remaining_time": "0:23:34"}
+ {"current_steps": 1320, "total_steps": 1455, "loss": 0.5547, "accuracy": 0.7437499761581421, "lr": 1.038961038961039e-07, "epoch": 0.9077233850517901, "percentage": 90.72, "elapsed_time": "3:34:40", "remaining_time": "0:21:57"}
+ {"current_steps": 1330, "total_steps": 1455, "loss": 0.5265, "accuracy": 0.7484375238418579, "lr": 9.62566844919786e-08, "epoch": 0.9146000773627885, "percentage": 91.41, "elapsed_time": "3:36:16", "remaining_time": "0:20:19"}
+ {"current_steps": 1340, "total_steps": 1455, "loss": 0.5739, "accuracy": 0.7109375596046448, "lr": 8.861726508785332e-08, "epoch": 0.9214767696737869, "percentage": 92.1, "elapsed_time": "3:37:54", "remaining_time": "0:18:42"}
+ {"current_steps": 1350, "total_steps": 1455, "loss": 0.5575, "accuracy": 0.71875, "lr": 8.097784568372803e-08, "epoch": 0.9283534619847853, "percentage": 92.78, "elapsed_time": "3:39:32", "remaining_time": "0:17:04"}
+ {"current_steps": 1360, "total_steps": 1455, "loss": 0.5149, "accuracy": 0.7828125357627869, "lr": 7.333842627960276e-08, "epoch": 0.9352301542957837, "percentage": 93.47, "elapsed_time": "3:41:09", "remaining_time": "0:15:26"}
+ {"current_steps": 1370, "total_steps": 1455, "loss": 0.5774, "accuracy": 0.7015625238418579, "lr": 6.569900687547746e-08, "epoch": 0.9421068466067821, "percentage": 94.16, "elapsed_time": "3:42:47", "remaining_time": "0:13:49"}
+ {"current_steps": 1380, "total_steps": 1455, "loss": 0.5551, "accuracy": 0.723437488079071, "lr": 5.805958747135217e-08, "epoch": 0.9489835389177805, "percentage": 94.85, "elapsed_time": "3:44:24", "remaining_time": "0:12:11"}
+ {"current_steps": 1390, "total_steps": 1455, "loss": 0.5424, "accuracy": 0.7109375, "lr": 5.042016806722689e-08, "epoch": 0.955860231228779, "percentage": 95.53, "elapsed_time": "3:46:02", "remaining_time": "0:10:34"}
+ {"current_steps": 1400, "total_steps": 1455, "loss": 0.5252, "accuracy": 0.753125011920929, "lr": 4.27807486631016e-08, "epoch": 0.9627369235397774, "percentage": 96.22, "elapsed_time": "3:47:37", "remaining_time": "0:08:56"}
+ {"current_steps": 1410, "total_steps": 1455, "loss": 0.4938, "accuracy": 0.792187511920929, "lr": 3.514132925897632e-08, "epoch": 0.9696136158507758, "percentage": 96.91, "elapsed_time": "3:49:13", "remaining_time": "0:07:18"}
+ {"current_steps": 1420, "total_steps": 1455, "loss": 0.5229, "accuracy": 0.7468750476837158, "lr": 2.750190985485103e-08, "epoch": 0.9764903081617742, "percentage": 97.59, "elapsed_time": "3:50:49", "remaining_time": "0:05:41"}
+ {"current_steps": 1430, "total_steps": 1455, "loss": 0.5485, "accuracy": 0.7250000238418579, "lr": 1.9862490450725743e-08, "epoch": 0.9833670004727726, "percentage": 98.28, "elapsed_time": "3:52:24", "remaining_time": "0:04:03"}
+ {"current_steps": 1440, "total_steps": 1455, "loss": 0.5481, "accuracy": 0.715624988079071, "lr": 1.2223071046600458e-08, "epoch": 0.990243692783771, "percentage": 98.97, "elapsed_time": "3:54:00", "remaining_time": "0:02:26"}
+ {"current_steps": 1450, "total_steps": 1455, "loss": 0.5363, "accuracy": 0.7593749761581421, "lr": 4.583651642475172e-09, "epoch": 0.9971203850947694, "percentage": 99.66, "elapsed_time": "3:55:36", "remaining_time": "0:00:48"}
+ {"current_steps": 1455, "total_steps": 1455, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "3:56:40", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2218 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 1455,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0068766923109984095,
+ "grad_norm": 19.513351103252234,
+ "learning_rate": 6.164383561643836e-08,
+ "logits/chosen": 0.036775026470422745,
+ "logits/rejected": 0.046910036355257034,
+ "logps/chosen": -190.58827209472656,
+ "logps/rejected": -191.73072814941406,
+ "loss": 0.6926,
+ "rewards/accuracies": 0.4078124761581421,
+ "rewards/chosen": 0.0005772202857770026,
+ "rewards/margins": 0.0013222144916653633,
+ "rewards/rejected": -0.0007449942640960217,
+ "step": 10
+ },
+ {
+ "epoch": 0.013753384621996819,
+ "grad_norm": 19.733804967529558,
+ "learning_rate": 1.3013698630136985e-07,
+ "logits/chosen": 0.05084426328539848,
+ "logits/rejected": 0.05570922791957855,
+ "logps/chosen": -189.29241943359375,
+ "logps/rejected": -189.13442993164062,
+ "loss": 0.6939,
+ "rewards/accuracies": 0.4843750298023224,
+ "rewards/chosen": -0.0005333187873475254,
+ "rewards/margins": -0.0012226278195157647,
+ "rewards/rejected": 0.0006893089739605784,
+ "step": 20
+ },
+ {
+ "epoch": 0.02063007693299523,
+ "grad_norm": 20.968724767768286,
+ "learning_rate": 1.9863013698630135e-07,
+ "logits/chosen": 0.07122799009084702,
+ "logits/rejected": 0.059470705687999725,
+ "logps/chosen": -200.34930419921875,
+ "logps/rejected": -197.34397888183594,
+ "loss": 0.6928,
+ "rewards/accuracies": 0.5046875476837158,
+ "rewards/chosen": -0.0008399286889471114,
+ "rewards/margins": 0.0011575755197554827,
+ "rewards/rejected": -0.0019975043833255768,
+ "step": 30
+ },
+ {
+ "epoch": 0.027506769243993638,
+ "grad_norm": 22.344037151449587,
+ "learning_rate": 2.671232876712329e-07,
+ "logits/chosen": 0.02660662867128849,
+ "logits/rejected": 0.02818603627383709,
+ "logps/chosen": -198.3375244140625,
+ "logps/rejected": -197.72930908203125,
+ "loss": 0.6936,
+ "rewards/accuracies": 0.4859375059604645,
+ "rewards/chosen": -0.0005683950148522854,
+ "rewards/margins": -0.0004704779712483287,
+ "rewards/rejected": -9.791657794266939e-05,
+ "step": 40
+ },
+ {
+ "epoch": 0.03438346155499205,
+ "grad_norm": 20.521372371749262,
+ "learning_rate": 3.3561643835616436e-07,
+ "logits/chosen": 0.06722528487443924,
+ "logits/rejected": 0.08071392774581909,
+ "logps/chosen": -187.2486114501953,
+ "logps/rejected": -185.5479278564453,
+ "loss": 0.6931,
+ "rewards/accuracies": 0.510937511920929,
+ "rewards/chosen": 0.0029008188284933567,
+ "rewards/margins": 0.0005296532763168216,
+ "rewards/rejected": 0.0023711654357612133,
+ "step": 50
+ },
+ {
+ "epoch": 0.04126015386599046,
+ "grad_norm": 22.181250645349447,
+ "learning_rate": 4.041095890410959e-07,
+ "logits/chosen": 0.006281435955315828,
+ "logits/rejected": 0.010298961773514748,
+ "logps/chosen": -198.05947875976562,
+ "logps/rejected": -200.37185668945312,
+ "loss": 0.6926,
+ "rewards/accuracies": 0.510937511920929,
+ "rewards/chosen": 0.006992017850279808,
+ "rewards/margins": 0.0015909502981230617,
+ "rewards/rejected": 0.005401067901402712,
+ "step": 60
+ },
+ {
+ "epoch": 0.048136846176988865,
+ "grad_norm": 19.169872791194972,
+ "learning_rate": 4.726027397260274e-07,
+ "logits/chosen": -0.01618768647313118,
+ "logits/rejected": 0.0075270626693964005,
+ "logps/chosen": -202.3906707763672,
+ "logps/rejected": -200.06686401367188,
+ "loss": 0.6929,
+ "rewards/accuracies": 0.504687488079071,
+ "rewards/chosen": 0.009140972048044205,
+ "rewards/margins": 0.0010492438450455666,
+ "rewards/rejected": 0.008091727271676064,
+ "step": 70
+ },
+ {
+ "epoch": 0.055013538487987276,
+ "grad_norm": 19.758724828653662,
+ "learning_rate": 5.410958904109589e-07,
+ "logits/chosen": 0.06360562890768051,
+ "logits/rejected": 0.054235585033893585,
+ "logps/chosen": -188.7932586669922,
+ "logps/rejected": -184.00587463378906,
+ "loss": 0.6907,
+ "rewards/accuracies": 0.5718749761581421,
+ "rewards/chosen": 0.01619785837829113,
+ "rewards/margins": 0.005430372431874275,
+ "rewards/rejected": 0.010767485946416855,
+ "step": 80
+ },
+ {
+ "epoch": 0.06189023079898569,
+ "grad_norm": 20.213997991248295,
+ "learning_rate": 6.095890410958904e-07,
+ "logits/chosen": 0.042256779968738556,
+ "logits/rejected": 0.03444061055779457,
+ "logps/chosen": -196.16871643066406,
+ "logps/rejected": -194.0621337890625,
+ "loss": 0.6918,
+ "rewards/accuracies": 0.5234375,
+ "rewards/chosen": 0.02520076185464859,
+ "rewards/margins": 0.0032487791031599045,
+ "rewards/rejected": 0.021951984614133835,
+ "step": 90
+ },
+ {
+ "epoch": 0.0687669231099841,
+ "grad_norm": 21.372464813538276,
+ "learning_rate": 6.78082191780822e-07,
+ "logits/chosen": -0.018003107979893684,
+ "logits/rejected": -0.023832082748413086,
+ "logps/chosen": -197.95404052734375,
+ "logps/rejected": -195.9728546142578,
+ "loss": 0.6881,
+ "rewards/accuracies": 0.5750000476837158,
+ "rewards/chosen": 0.046728331595659256,
+ "rewards/margins": 0.011084327474236488,
+ "rewards/rejected": 0.03564400225877762,
+ "step": 100
+ },
+ {
+ "epoch": 0.07564361542098251,
+ "grad_norm": 20.643367976648022,
+ "learning_rate": 7.465753424657533e-07,
+ "logits/chosen": 0.0021628281101584435,
+ "logits/rejected": 0.010397136211395264,
+ "logps/chosen": -191.6479949951172,
+ "logps/rejected": -194.17164611816406,
+ "loss": 0.6889,
+ "rewards/accuracies": 0.5562500357627869,
+ "rewards/chosen": 0.05788281559944153,
+ "rewards/margins": 0.01013133954256773,
+ "rewards/rejected": 0.04775147885084152,
+ "step": 110
+ },
+ {
+ "epoch": 0.08252030773198092,
+ "grad_norm": 21.164397049657207,
+ "learning_rate": 8.150684931506849e-07,
+ "logits/chosen": -0.03978746384382248,
+ "logits/rejected": -0.023968348279595375,
+ "logps/chosen": -200.37274169921875,
+ "logps/rejected": -199.3896942138672,
+ "loss": 0.686,
+ "rewards/accuracies": 0.5703125,
+ "rewards/chosen": 0.07369903475046158,
+ "rewards/margins": 0.016540035605430603,
+ "rewards/rejected": 0.057159002870321274,
+ "step": 120
+ },
+ {
+ "epoch": 0.08939700004297933,
+ "grad_norm": 20.81088307270136,
+ "learning_rate": 8.835616438356164e-07,
+ "logits/chosen": -0.004022962413728237,
+ "logits/rejected": 0.006641650106757879,
+ "logps/chosen": -198.2274627685547,
+ "logps/rejected": -198.31088256835938,
+ "loss": 0.6825,
+ "rewards/accuracies": 0.5765625238418579,
+ "rewards/chosen": 0.07722726464271545,
+ "rewards/margins": 0.024418998509645462,
+ "rewards/rejected": 0.05280826985836029,
+ "step": 130
+ },
+ {
+ "epoch": 0.09627369235397773,
+ "grad_norm": 20.466753738955518,
+ "learning_rate": 9.520547945205479e-07,
+ "logits/chosen": -0.030300267040729523,
+ "logits/rejected": -0.027351014316082,
+ "logps/chosen": -187.5103302001953,
+ "logps/rejected": -186.61094665527344,
+ "loss": 0.6835,
+ "rewards/accuracies": 0.596875011920929,
+ "rewards/chosen": 0.08767089992761612,
+ "rewards/margins": 0.02380959689617157,
+ "rewards/rejected": 0.06386130303144455,
+ "step": 140
+ },
+ {
+ "epoch": 0.10315038466497614,
+ "grad_norm": 21.625750402342522,
+ "learning_rate": 9.977081741787625e-07,
+ "logits/chosen": -0.055144187062978745,
+ "logits/rejected": -0.039117198437452316,
+ "logps/chosen": -194.8080596923828,
+ "logps/rejected": -191.05648803710938,
+ "loss": 0.6789,
+ "rewards/accuracies": 0.5828125476837158,
+ "rewards/chosen": 0.08812759071588516,
+ "rewards/margins": 0.03495479375123978,
+ "rewards/rejected": 0.05317278951406479,
+ "step": 150
+ },
+ {
+ "epoch": 0.11002707697597455,
+ "grad_norm": 22.141057438202445,
+ "learning_rate": 9.90068754774637e-07,
+ "logits/chosen": -0.0353374183177948,
+ "logits/rejected": -0.03970889374613762,
+ "logps/chosen": -191.93936157226562,
+ "logps/rejected": -189.392333984375,
+ "loss": 0.6785,
+ "rewards/accuracies": 0.5843750238418579,
+ "rewards/chosen": 0.08410216122865677,
+ "rewards/margins": 0.038218073546886444,
+ "rewards/rejected": 0.04588409140706062,
+ "step": 160
+ },
+ {
+ "epoch": 0.11690376928697296,
+ "grad_norm": 22.165325017656897,
+ "learning_rate": 9.824293353705118e-07,
+ "logits/chosen": -0.07139798998832703,
+ "logits/rejected": -0.05791984125971794,
+ "logps/chosen": -193.04006958007812,
+ "logps/rejected": -190.68820190429688,
+ "loss": 0.6697,
+ "rewards/accuracies": 0.6453125476837158,
+ "rewards/chosen": 0.09184054285287857,
+ "rewards/margins": 0.0603640116751194,
+ "rewards/rejected": 0.031476523727178574,
+ "step": 170
+ },
+ {
+ "epoch": 0.12378046159797138,
+ "grad_norm": 20.869054112054222,
+ "learning_rate": 9.747899159663866e-07,
+ "logits/chosen": -0.06488940119743347,
+ "logits/rejected": -0.07598251849412918,
+ "logps/chosen": -186.8896484375,
+ "logps/rejected": -184.165283203125,
+ "loss": 0.6673,
+ "rewards/accuracies": 0.6093750596046448,
+ "rewards/chosen": 0.11738091707229614,
+ "rewards/margins": 0.06726492196321487,
+ "rewards/rejected": 0.05011599883437157,
+ "step": 180
+ },
+ {
+ "epoch": 0.1306571539089698,
+ "grad_norm": 22.08932184958235,
+ "learning_rate": 9.671504965622611e-07,
+ "logits/chosen": -0.11792731285095215,
+ "logits/rejected": -0.10376260429620743,
+ "logps/chosen": -194.56234741210938,
+ "logps/rejected": -199.22845458984375,
+ "loss": 0.672,
+ "rewards/accuracies": 0.6109375357627869,
+ "rewards/chosen": 0.14032624661922455,
+ "rewards/margins": 0.06233515962958336,
+ "rewards/rejected": 0.07799109071493149,
+ "step": 190
+ },
+ {
+ "epoch": 0.1375338462199682,
+ "grad_norm": 22.536330265073364,
+ "learning_rate": 9.59511077158136e-07,
+ "logits/chosen": -0.14459270238876343,
+ "logits/rejected": -0.14901231229305267,
+ "logps/chosen": -183.6916961669922,
+ "logps/rejected": -185.39944458007812,
+ "loss": 0.6656,
+ "rewards/accuracies": 0.614062488079071,
+ "rewards/chosen": 0.14809665083885193,
+ "rewards/margins": 0.07755547016859055,
+ "rewards/rejected": 0.07054118812084198,
+ "step": 200
+ },
+ {
+ "epoch": 0.1444105385309666,
+ "grad_norm": 22.36629380857143,
+ "learning_rate": 9.518716577540107e-07,
+ "logits/chosen": -0.15250758826732635,
+ "logits/rejected": -0.1362263560295105,
+ "logps/chosen": -184.0275421142578,
+ "logps/rejected": -182.7425079345703,
+ "loss": 0.6553,
+ "rewards/accuracies": 0.628125011920929,
+ "rewards/chosen": 0.12464237958192825,
+ "rewards/margins": 0.10570727288722992,
+ "rewards/rejected": 0.018935102969408035,
+ "step": 210
+ },
+ {
+ "epoch": 0.15128723084196502,
+ "grad_norm": 22.46675762870305,
+ "learning_rate": 9.442322383498854e-07,
+ "logits/chosen": -0.12515884637832642,
+ "logits/rejected": -0.10421467572450638,
+ "logps/chosen": -182.95225524902344,
+ "logps/rejected": -182.8366241455078,
+ "loss": 0.6632,
+ "rewards/accuracies": 0.6031250357627869,
+ "rewards/chosen": 0.1099134311079979,
+ "rewards/margins": 0.08599118143320084,
+ "rewards/rejected": 0.02392224781215191,
+ "step": 220
+ },
+ {
+ "epoch": 0.15816392315296343,
+ "grad_norm": 21.12786861273902,
+ "learning_rate": 9.3659281894576e-07,
+ "logits/chosen": -0.17644798755645752,
+ "logits/rejected": -0.16624829173088074,
+ "logps/chosen": -188.5326385498047,
+ "logps/rejected": -194.2169952392578,
+ "loss": 0.6768,
+ "rewards/accuracies": 0.606249988079071,
+ "rewards/chosen": 0.08918297290802002,
+ "rewards/margins": 0.07856127619743347,
+ "rewards/rejected": 0.01062170509248972,
+ "step": 230
+ },
+ {
+ "epoch": 0.16504061546396184,
+ "grad_norm": 26.07540917024713,
+ "learning_rate": 9.289533995416348e-07,
+ "logits/chosen": -0.15431278944015503,
+ "logits/rejected": -0.12972977757453918,
+ "logps/chosen": -194.038818359375,
+ "logps/rejected": -197.2062530517578,
+ "loss": 0.6448,
+ "rewards/accuracies": 0.660937488079071,
+ "rewards/chosen": 0.08370951563119888,
+ "rewards/margins": 0.13731984794139862,
+ "rewards/rejected": -0.053610339760780334,
+ "step": 240
+ },
+ {
+ "epoch": 0.17191730777496025,
+ "grad_norm": 24.572284116149454,
+ "learning_rate": 9.213139801375095e-07,
+ "logits/chosen": -0.15322308242321014,
+ "logits/rejected": -0.13083471357822418,
+ "logps/chosen": -195.24945068359375,
+ "logps/rejected": -192.50152587890625,
+ "loss": 0.672,
+ "rewards/accuracies": 0.604687511920929,
+ "rewards/chosen": 0.03577382117509842,
+ "rewards/margins": 0.08709970116615295,
+ "rewards/rejected": -0.05132588744163513,
+ "step": 250
+ },
+ {
+ "epoch": 0.17879400008595867,
+ "grad_norm": 22.465487345289187,
+ "learning_rate": 9.136745607333842e-07,
+ "logits/chosen": -0.21423660218715668,
+ "logits/rejected": -0.20453476905822754,
+ "logps/chosen": -196.3953857421875,
+ "logps/rejected": -196.35653686523438,
+ "loss": 0.6455,
+ "rewards/accuracies": 0.6234374642372131,
+ "rewards/chosen": 0.043278180062770844,
+ "rewards/margins": 0.15055982768535614,
+ "rewards/rejected": -0.10728166252374649,
+ "step": 260
+ },
+ {
+ "epoch": 0.18567069239695708,
+ "grad_norm": 24.171028726439587,
+ "learning_rate": 9.060351413292589e-07,
+ "logits/chosen": -0.16084127128124237,
+ "logits/rejected": -0.1714678704738617,
+ "logps/chosen": -194.72898864746094,
+ "logps/rejected": -192.3715057373047,
+ "loss": 0.6469,
+ "rewards/accuracies": 0.6265625357627869,
+ "rewards/chosen": 0.0378761887550354,
+ "rewards/margins": 0.15679332613945007,
+ "rewards/rejected": -0.11891713738441467,
+ "step": 270
+ },
+ {
+ "epoch": 0.19254738470795546,
+ "grad_norm": 21.694031165646436,
+ "learning_rate": 8.983957219251337e-07,
+ "logits/chosen": -0.15633784234523773,
+ "logits/rejected": -0.139601469039917,
+ "logps/chosen": -196.56454467773438,
+ "logps/rejected": -196.92361450195312,
+ "loss": 0.637,
+ "rewards/accuracies": 0.6578124761581421,
+ "rewards/chosen": 0.03219035267829895,
+ "rewards/margins": 0.17698359489440918,
+ "rewards/rejected": -0.14479324221611023,
+ "step": 280
+ },
+ {
+ "epoch": 0.19942407701895387,
+ "grad_norm": 24.551730142560587,
+ "learning_rate": 8.907563025210084e-07,
+ "logits/chosen": -0.17860618233680725,
+ "logits/rejected": -0.172758549451828,
+ "logps/chosen": -191.48568725585938,
+ "logps/rejected": -193.51708984375,
+ "loss": 0.6617,
+ "rewards/accuracies": 0.6078125238418579,
+ "rewards/chosen": 0.02595115639269352,
+ "rewards/margins": 0.1463395208120346,
+ "rewards/rejected": -0.12038837373256683,
+ "step": 290
+ },
+ {
+ "epoch": 0.20630076932995228,
+ "grad_norm": 22.1511663675151,
+ "learning_rate": 8.83116883116883e-07,
+ "logits/chosen": -0.22528919577598572,
+ "logits/rejected": -0.2300284206867218,
+ "logps/chosen": -194.1256103515625,
+ "logps/rejected": -194.8380889892578,
+ "loss": 0.6393,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": 0.05988895148038864,
+ "rewards/margins": 0.19410541653633118,
+ "rewards/rejected": -0.13421647250652313,
+ "step": 300
+ },
+ {
+ "epoch": 0.2131774616409507,
+ "grad_norm": 22.898634448709206,
+ "learning_rate": 8.754774637127578e-07,
+ "logits/chosen": -0.17205068469047546,
+ "logits/rejected": -0.15463976562023163,
+ "logps/chosen": -192.48263549804688,
+ "logps/rejected": -199.1364288330078,
+ "loss": 0.6353,
+ "rewards/accuracies": 0.660937488079071,
+ "rewards/chosen": 0.06124072149395943,
+ "rewards/margins": 0.2011023461818695,
+ "rewards/rejected": -0.139861598610878,
+ "step": 310
+ },
+ {
+ "epoch": 0.2200541539519491,
+ "grad_norm": 21.26291555492832,
+ "learning_rate": 8.678380443086325e-07,
+ "logits/chosen": -0.1845797449350357,
+ "logits/rejected": -0.18274115025997162,
+ "logps/chosen": -194.61219787597656,
+ "logps/rejected": -195.93377685546875,
+ "loss": 0.6592,
+ "rewards/accuracies": 0.604687511920929,
+ "rewards/chosen": 0.0389692559838295,
+ "rewards/margins": 0.1638563871383667,
+ "rewards/rejected": -0.12488711625337601,
+ "step": 320
+ },
+ {
+ "epoch": 0.22693084626294752,
+ "grad_norm": 23.417446659141998,
+ "learning_rate": 8.601986249045072e-07,
+ "logits/chosen": -0.18124790489673615,
+ "logits/rejected": -0.15224400162696838,
+ "logps/chosen": -192.33811950683594,
+ "logps/rejected": -201.81790161132812,
+ "loss": 0.6347,
+ "rewards/accuracies": 0.653124988079071,
+ "rewards/chosen": 0.010530395433306694,
+ "rewards/margins": 0.3004379868507385,
+ "rewards/rejected": -0.2899076044559479,
+ "step": 330
+ },
+ {
+ "epoch": 0.23380753857394593,
+ "grad_norm": 23.853594643071567,
+ "learning_rate": 8.52559205500382e-07,
+ "logits/chosen": -0.15507660806179047,
+ "logits/rejected": -0.1385350376367569,
+ "logps/chosen": -193.850830078125,
+ "logps/rejected": -199.759521484375,
+ "loss": 0.6401,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": -0.01992304064333439,
+ "rewards/margins": 0.19967934489250183,
+ "rewards/rejected": -0.21960237622261047,
+ "step": 340
+ },
+ {
+ "epoch": 0.24068423088494434,
+ "grad_norm": 19.901838633792355,
+ "learning_rate": 8.449197860962567e-07,
+ "logits/chosen": -0.1355869621038437,
+ "logits/rejected": -0.12776578962802887,
+ "logps/chosen": -194.73104858398438,
+ "logps/rejected": -194.59083557128906,
+ "loss": 0.6335,
+ "rewards/accuracies": 0.671875,
+ "rewards/chosen": -0.022219305858016014,
+ "rewards/margins": 0.22338087856769562,
+ "rewards/rejected": -0.24560019373893738,
+ "step": 350
+ },
+ {
+ "epoch": 0.24756092319594275,
+ "grad_norm": 25.638966187406425,
+ "learning_rate": 8.372803666921313e-07,
+ "logits/chosen": -0.15089087188243866,
+ "logits/rejected": -0.1559664011001587,
+ "logps/chosen": -190.36236572265625,
+ "logps/rejected": -189.7425537109375,
+ "loss": 0.6413,
+ "rewards/accuracies": 0.6359375715255737,
+ "rewards/chosen": -0.05937535688281059,
+ "rewards/margins": 0.2122623175382614,
+ "rewards/rejected": -0.2716377079486847,
+ "step": 360
+ },
+ {
+ "epoch": 0.25443761550694116,
+ "grad_norm": 19.325413063698168,
+ "learning_rate": 8.296409472880061e-07,
+ "logits/chosen": -0.11108885705471039,
+ "logits/rejected": -0.08246968686580658,
+ "logps/chosen": -192.77230834960938,
+ "logps/rejected": -197.00479125976562,
+ "loss": 0.6349,
+ "rewards/accuracies": 0.651562511920929,
+ "rewards/chosen": -0.07588831335306168,
+ "rewards/margins": 0.23621290922164917,
+ "rewards/rejected": -0.31210121512413025,
+ "step": 370
+ },
+ {
+ "epoch": 0.2613143078179396,
+ "grad_norm": 22.275308373094646,
+ "learning_rate": 8.220015278838807e-07,
+ "logits/chosen": -0.13851581513881683,
+ "logits/rejected": -0.12797267735004425,
+ "logps/chosen": -202.27577209472656,
+ "logps/rejected": -202.86090087890625,
+ "loss": 0.6278,
+ "rewards/accuracies": 0.6578125357627869,
+ "rewards/chosen": -0.08238591253757477,
+ "rewards/margins": 0.2427002489566803,
+ "rewards/rejected": -0.32508617639541626,
+ "step": 380
+ },
+ {
+ "epoch": 0.268191000128938,
+ "grad_norm": 22.17811856676897,
+ "learning_rate": 8.143621084797555e-07,
+ "logits/chosen": -0.17540393769741058,
+ "logits/rejected": -0.1724541336297989,
+ "logps/chosen": -188.11595153808594,
+ "logps/rejected": -193.10107421875,
+ "loss": 0.6311,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": -0.09082019329071045,
+ "rewards/margins": 0.2361401468515396,
+ "rewards/rejected": -0.32696035504341125,
+ "step": 390
+ },
+ {
+ "epoch": 0.2750676924399364,
+ "grad_norm": 22.797709692683508,
+ "learning_rate": 8.067226890756303e-07,
+ "logits/chosen": -0.17259512841701508,
+ "logits/rejected": -0.16403602063655853,
+ "logps/chosen": -191.74493408203125,
+ "logps/rejected": -199.08279418945312,
+ "loss": 0.6176,
+ "rewards/accuracies": 0.6703125238418579,
+ "rewards/chosen": -0.12271551042795181,
+ "rewards/margins": 0.2966160476207733,
+ "rewards/rejected": -0.41933155059814453,
+ "step": 400
+ },
+ {
+ "epoch": 0.2819443847509348,
+ "grad_norm": 20.345985795599848,
+ "learning_rate": 7.990832696715049e-07,
+ "logits/chosen": -0.16652648150920868,
+ "logits/rejected": -0.1538589745759964,
+ "logps/chosen": -195.2452392578125,
+ "logps/rejected": -201.4396209716797,
+ "loss": 0.654,
+ "rewards/accuracies": 0.6562500596046448,
+ "rewards/chosen": -0.1387416571378708,
+ "rewards/margins": 0.19358447194099426,
+ "rewards/rejected": -0.33232617378234863,
+ "step": 410
+ },
+ {
+ "epoch": 0.2888210770619332,
+ "grad_norm": 22.597861375165436,
+ "learning_rate": 7.914438502673797e-07,
+ "logits/chosen": -0.19070886075496674,
+ "logits/rejected": -0.1932705044746399,
+ "logps/chosen": -189.9697265625,
+ "logps/rejected": -193.302734375,
+ "loss": 0.6166,
+ "rewards/accuracies": 0.6484375,
+ "rewards/chosen": -0.07254549115896225,
+ "rewards/margins": 0.27427053451538086,
+ "rewards/rejected": -0.3468160331249237,
+ "step": 420
+ },
+ {
+ "epoch": 0.29569776937293163,
+ "grad_norm": 19.163014642636924,
+ "learning_rate": 7.838044308632544e-07,
+ "logits/chosen": -0.19799278676509857,
+ "logits/rejected": -0.1733386218547821,
+ "logps/chosen": -189.64312744140625,
+ "logps/rejected": -192.6612548828125,
+ "loss": 0.6121,
+ "rewards/accuracies": 0.6812499761581421,
+ "rewards/chosen": -0.10480398684740067,
+ "rewards/margins": 0.3255974352359772,
+ "rewards/rejected": -0.43040144443511963,
+ "step": 430
+ },
+ {
+ "epoch": 0.30257446168393004,
+ "grad_norm": 23.83020212258771,
+ "learning_rate": 7.76165011459129e-07,
+ "logits/chosen": -0.23343642055988312,
+ "logits/rejected": -0.21432968974113464,
+ "logps/chosen": -190.7268829345703,
+ "logps/rejected": -194.999267578125,
+ "loss": 0.6397,
+ "rewards/accuracies": 0.65625,
+ "rewards/chosen": -0.1279398649930954,
+ "rewards/margins": 0.24956779181957245,
+ "rewards/rejected": -0.37750762701034546,
+ "step": 440
+ },
+ {
+ "epoch": 0.30945115399492845,
+ "grad_norm": 24.165057101867006,
+ "learning_rate": 7.685255920550038e-07,
+ "logits/chosen": -0.2023199200630188,
+ "logits/rejected": -0.19855788350105286,
+ "logps/chosen": -198.74795532226562,
+ "logps/rejected": -198.89248657226562,
+ "loss": 0.6273,
+ "rewards/accuracies": 0.6640625,
+ "rewards/chosen": -0.10882572084665298,
+ "rewards/margins": 0.29104942083358765,
+ "rewards/rejected": -0.39987513422966003,
+ "step": 450
+ },
+ {
+ "epoch": 0.31632784630592686,
+ "grad_norm": 19.17712769364233,
+ "learning_rate": 7.608861726508786e-07,
+ "logits/chosen": -0.23141232132911682,
+ "logits/rejected": -0.1951158493757248,
+ "logps/chosen": -189.33047485351562,
+ "logps/rejected": -193.03457641601562,
+ "loss": 0.6276,
+ "rewards/accuracies": 0.6343749761581421,
+ "rewards/chosen": -0.11822903901338577,
+ "rewards/margins": 0.27355122566223145,
+ "rewards/rejected": -0.3917802572250366,
+ "step": 460
+ },
+ {
+ "epoch": 0.3232045386169253,
+ "grad_norm": 22.019435639102824,
+ "learning_rate": 7.532467532467532e-07,
+ "logits/chosen": -0.18933194875717163,
+ "logits/rejected": -0.1771559715270996,
+ "logps/chosen": -186.64694213867188,
+ "logps/rejected": -192.64498901367188,
+ "loss": 0.6153,
+ "rewards/accuracies": 0.6609375476837158,
+ "rewards/chosen": -0.13781782984733582,
+ "rewards/margins": 0.3126053214073181,
+ "rewards/rejected": -0.45042312145233154,
+ "step": 470
+ },
+ {
+ "epoch": 0.3300812309279237,
+ "grad_norm": 23.981246146235584,
+ "learning_rate": 7.45607333842628e-07,
+ "logits/chosen": -0.18081098794937134,
+ "logits/rejected": -0.159798726439476,
+ "logps/chosen": -196.7408905029297,
+ "logps/rejected": -201.7342071533203,
+ "loss": 0.6265,
+ "rewards/accuracies": 0.667187511920929,
+ "rewards/chosen": -0.14344578981399536,
+ "rewards/margins": 0.3393925428390503,
+ "rewards/rejected": -0.48283830285072327,
+ "step": 480
+ },
+ {
+ "epoch": 0.3369579232389221,
+ "grad_norm": 21.768637428940725,
+ "learning_rate": 7.379679144385026e-07,
+ "logits/chosen": -0.19270475208759308,
+ "logits/rejected": -0.17529691755771637,
+ "logps/chosen": -196.528564453125,
+ "logps/rejected": -198.35684204101562,
+ "loss": 0.5946,
+ "rewards/accuracies": 0.6828124523162842,
+ "rewards/chosen": -0.10522855073213577,
+ "rewards/margins": 0.3783465623855591,
+ "rewards/rejected": -0.48357510566711426,
+ "step": 490
+ },
+ {
+ "epoch": 0.3438346155499205,
+ "grad_norm": 24.44973419066558,
+ "learning_rate": 7.303284950343773e-07,
+ "logits/chosen": -0.23971419036388397,
+ "logits/rejected": -0.20786018669605255,
+ "logps/chosen": -197.9569854736328,
+ "logps/rejected": -203.95391845703125,
+ "loss": 0.6099,
+ "rewards/accuracies": 0.6796875,
+ "rewards/chosen": -0.19895704090595245,
+ "rewards/margins": 0.350207656621933,
+ "rewards/rejected": -0.5491647124290466,
+ "step": 500
+ },
+ {
+ "epoch": 0.3507113078609189,
+ "grad_norm": 20.47563026640517,
+ "learning_rate": 7.226890756302521e-07,
+ "logits/chosen": -0.23047102987766266,
+ "logits/rejected": -0.20306196808815002,
+ "logps/chosen": -185.67425537109375,
+ "logps/rejected": -194.40451049804688,
+ "loss": 0.6174,
+ "rewards/accuracies": 0.660937488079071,
+ "rewards/chosen": -0.12340579181909561,
+ "rewards/margins": 0.32509928941726685,
+ "rewards/rejected": -0.44850510358810425,
+ "step": 510
+ },
+ {
+ "epoch": 0.35758800017191733,
+ "grad_norm": 25.454418717569137,
+ "learning_rate": 7.150496562261268e-07,
+ "logits/chosen": -0.1873900145292282,
+ "logits/rejected": -0.15569299459457397,
+ "logps/chosen": -192.04225158691406,
+ "logps/rejected": -197.125732421875,
+ "loss": 0.5978,
+ "rewards/accuracies": 0.6890624761581421,
+ "rewards/chosen": -0.13839715719223022,
+ "rewards/margins": 0.3862819969654083,
+ "rewards/rejected": -0.5246791243553162,
+ "step": 520
+ },
+ {
+ "epoch": 0.36446469248291574,
+ "grad_norm": 24.079811094829985,
+ "learning_rate": 7.074102368220015e-07,
+ "logits/chosen": -0.17220811545848846,
+ "logits/rejected": -0.13604801893234253,
+ "logps/chosen": -195.03099060058594,
+ "logps/rejected": -202.47994995117188,
+ "loss": 0.5866,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.1480688452720642,
+ "rewards/margins": 0.4154379665851593,
+ "rewards/rejected": -0.5635067224502563,
+ "step": 530
+ },
+ {
+ "epoch": 0.37134138479391415,
+ "grad_norm": 23.58498750162372,
+ "learning_rate": 6.997708174178763e-07,
+ "logits/chosen": -0.18907414376735687,
+ "logits/rejected": -0.1850075125694275,
+ "logps/chosen": -195.14674377441406,
+ "logps/rejected": -201.75511169433594,
+ "loss": 0.6108,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -0.16248857975006104,
+ "rewards/margins": 0.37109190225601196,
+ "rewards/rejected": -0.5335805416107178,
+ "step": 540
+ },
+ {
+ "epoch": 0.37821807710491256,
+ "grad_norm": 21.771702608666157,
+ "learning_rate": 6.92131398013751e-07,
+ "logits/chosen": -0.2303890883922577,
+ "logits/rejected": -0.18967962265014648,
+ "logps/chosen": -195.48226928710938,
+ "logps/rejected": -203.158447265625,
+ "loss": 0.6143,
+ "rewards/accuracies": 0.6703125238418579,
+ "rewards/chosen": -0.1789567768573761,
+ "rewards/margins": 0.3507557511329651,
+ "rewards/rejected": -0.5297124981880188,
+ "step": 550
+ },
+ {
+ "epoch": 0.3850947694159109,
+ "grad_norm": 22.729561095188803,
+ "learning_rate": 6.844919786096256e-07,
+ "logits/chosen": -0.20701324939727783,
+ "logits/rejected": -0.18544885516166687,
+ "logps/chosen": -198.18287658691406,
+ "logps/rejected": -206.41525268554688,
+ "loss": 0.6138,
+ "rewards/accuracies": 0.6750000715255737,
+ "rewards/chosen": -0.1394013613462448,
+ "rewards/margins": 0.3607535660266876,
+ "rewards/rejected": -0.500154972076416,
+ "step": 560
+ },
+ {
+ "epoch": 0.39197146172690933,
+ "grad_norm": 21.747354835708766,
+ "learning_rate": 6.768525592055004e-07,
+ "logits/chosen": -0.19353720545768738,
+ "logits/rejected": -0.17840342223644257,
+ "logps/chosen": -186.3765869140625,
+ "logps/rejected": -196.66709899902344,
+ "loss": 0.6166,
+ "rewards/accuracies": 0.667187511920929,
+ "rewards/chosen": -0.19582639634609222,
+ "rewards/margins": 0.3339621126651764,
+ "rewards/rejected": -0.5297885537147522,
+ "step": 570
+ },
+ {
+ "epoch": 0.39884815403790774,
+ "grad_norm": 20.847290517267425,
+ "learning_rate": 6.69213139801375e-07,
+ "logits/chosen": -0.20126751065254211,
+ "logits/rejected": -0.1916297972202301,
+ "logps/chosen": -195.2719268798828,
+ "logps/rejected": -197.80516052246094,
+ "loss": 0.5786,
+ "rewards/accuracies": 0.6968749761581421,
+ "rewards/chosen": -0.1558343470096588,
+ "rewards/margins": 0.4313123822212219,
+ "rewards/rejected": -0.5871467590332031,
+ "step": 580
+ },
+ {
+ "epoch": 0.40572484634890615,
+ "grad_norm": 25.220349314015735,
+ "learning_rate": 6.615737203972498e-07,
+ "logits/chosen": -0.21005675196647644,
+ "logits/rejected": -0.18306328356266022,
+ "logps/chosen": -201.1265869140625,
+ "logps/rejected": -206.52740478515625,
+ "loss": 0.6123,
+ "rewards/accuracies": 0.6640625,
+ "rewards/chosen": -0.14514455199241638,
+ "rewards/margins": 0.37125253677368164,
+ "rewards/rejected": -0.5163971185684204,
+ "step": 590
+ },
+ {
+ "epoch": 0.41260153865990457,
+ "grad_norm": 23.09878281707089,
+ "learning_rate": 6.539343009931245e-07,
+ "logits/chosen": -0.2232796549797058,
+ "logits/rejected": -0.20235514640808105,
+ "logps/chosen": -197.15594482421875,
+ "logps/rejected": -204.2799530029297,
+ "loss": 0.5818,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.10441678762435913,
+ "rewards/margins": 0.4412200152873993,
+ "rewards/rejected": -0.5456368327140808,
+ "step": 600
+ },
+ {
+ "epoch": 0.419478230970903,
+ "grad_norm": 21.299223036157755,
+ "learning_rate": 6.462948815889992e-07,
+ "logits/chosen": -0.19436593353748322,
+ "logits/rejected": -0.1735837459564209,
+ "logps/chosen": -192.80274963378906,
+ "logps/rejected": -206.69189453125,
+ "loss": 0.5839,
+ "rewards/accuracies": 0.7078125476837158,
+ "rewards/chosen": -0.16161832213401794,
+ "rewards/margins": 0.43346843123435974,
+ "rewards/rejected": -0.5950866937637329,
+ "step": 610
+ },
+ {
+ "epoch": 0.4263549232819014,
+ "grad_norm": 23.27788758526324,
+ "learning_rate": 6.386554621848739e-07,
+ "logits/chosen": -0.20898228883743286,
+ "logits/rejected": -0.18023236095905304,
+ "logps/chosen": -202.06637573242188,
+ "logps/rejected": -208.60118103027344,
+ "loss": 0.6254,
+ "rewards/accuracies": 0.6593750715255737,
+ "rewards/chosen": -0.18689092993736267,
+ "rewards/margins": 0.35333251953125,
+ "rewards/rejected": -0.5402234792709351,
+ "step": 620
+ },
+ {
+ "epoch": 0.4332316155928998,
+ "grad_norm": 19.046089044171406,
+ "learning_rate": 6.310160427807486e-07,
+ "logits/chosen": -0.2245250940322876,
+ "logits/rejected": -0.20658770203590393,
+ "logps/chosen": -193.53053283691406,
+ "logps/rejected": -202.41139221191406,
+ "loss": 0.6031,
+ "rewards/accuracies": 0.7062500715255737,
+ "rewards/chosen": -0.2109271138906479,
+ "rewards/margins": 0.42086154222488403,
+ "rewards/rejected": -0.6317886114120483,
+ "step": 630
+ },
+ {
+ "epoch": 0.4401083079038982,
+ "grad_norm": 22.096797282994814,
+ "learning_rate": 6.233766233766233e-07,
+ "logits/chosen": -0.2112444043159485,
+ "logits/rejected": -0.20102985203266144,
+ "logps/chosen": -198.9567413330078,
+ "logps/rejected": -202.45970153808594,
+ "loss": 0.5985,
+ "rewards/accuracies": 0.7140624523162842,
+ "rewards/chosen": -0.16410329937934875,
+ "rewards/margins": 0.40336376428604126,
+ "rewards/rejected": -0.5674670338630676,
+ "step": 640
+ },
+ {
+ "epoch": 0.4469850002148966,
+ "grad_norm": 24.894702297139187,
+ "learning_rate": 6.157372039724981e-07,
+ "logits/chosen": -0.24385106563568115,
+ "logits/rejected": -0.2102491557598114,
+ "logps/chosen": -204.98455810546875,
+ "logps/rejected": -211.8107147216797,
+ "loss": 0.599,
+ "rewards/accuracies": 0.6578125357627869,
+ "rewards/chosen": -0.2059740573167801,
+ "rewards/margins": 0.4995943307876587,
+ "rewards/rejected": -0.7055683732032776,
+ "step": 650
+ },
+ {
+ "epoch": 0.45386169252589503,
+ "grad_norm": 22.68581004612934,
+ "learning_rate": 6.080977845683728e-07,
+ "logits/chosen": -0.21157239377498627,
+ "logits/rejected": -0.1900298148393631,
+ "logps/chosen": -188.3769073486328,
+ "logps/rejected": -193.0936279296875,
+ "loss": 0.6192,
+ "rewards/accuracies": 0.6859375238418579,
+ "rewards/chosen": -0.22981426119804382,
+ "rewards/margins": 0.36119741201400757,
+ "rewards/rejected": -0.591011643409729,
+ "step": 660
+ },
+ {
+ "epoch": 0.46073838483689344,
+ "grad_norm": 20.403756461942695,
+ "learning_rate": 6.004583651642475e-07,
+ "logits/chosen": -0.21049100160598755,
+ "logits/rejected": -0.1666809320449829,
+ "logps/chosen": -197.26670837402344,
+ "logps/rejected": -201.7057342529297,
+ "loss": 0.5748,
+ "rewards/accuracies": 0.6937500238418579,
+ "rewards/chosen": -0.16547085344791412,
+ "rewards/margins": 0.5076817274093628,
+ "rewards/rejected": -0.6731526255607605,
+ "step": 670
+ },
+ {
+ "epoch": 0.46761507714789186,
+ "grad_norm": 21.094570180421684,
+ "learning_rate": 5.928189457601223e-07,
+ "logits/chosen": -0.21226650476455688,
+ "logits/rejected": -0.1999017894268036,
+ "logps/chosen": -199.63485717773438,
+ "logps/rejected": -206.0372314453125,
+ "loss": 0.5868,
+ "rewards/accuracies": 0.6843750476837158,
+ "rewards/chosen": -0.1587166041135788,
+ "rewards/margins": 0.4724137485027313,
+ "rewards/rejected": -0.6311303377151489,
+ "step": 680
+ },
+ {
+ "epoch": 0.47449176945889027,
+ "grad_norm": 19.759234266931482,
+ "learning_rate": 5.851795263559969e-07,
+ "logits/chosen": -0.21826893091201782,
+ "logits/rejected": -0.1906730979681015,
+ "logps/chosen": -192.1505126953125,
+ "logps/rejected": -197.7865447998047,
+ "loss": 0.5795,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.1622937172651291,
+ "rewards/margins": 0.4853915572166443,
+ "rewards/rejected": -0.6476852297782898,
+ "step": 690
+ },
+ {
+ "epoch": 0.4813684617698887,
+ "grad_norm": 23.644038409945107,
+ "learning_rate": 5.775401069518716e-07,
+ "logits/chosen": -0.22725778818130493,
+ "logits/rejected": -0.22313261032104492,
+ "logps/chosen": -197.45359802246094,
+ "logps/rejected": -204.32212829589844,
+ "loss": 0.6131,
+ "rewards/accuracies": 0.7015625238418579,
+ "rewards/chosen": -0.20547083020210266,
+ "rewards/margins": 0.406261146068573,
+ "rewards/rejected": -0.611732006072998,
+ "step": 700
+ },
+ {
+ "epoch": 0.4882451540808871,
+ "grad_norm": 24.828384695442697,
+ "learning_rate": 5.699006875477463e-07,
+ "logits/chosen": -0.26735788583755493,
+ "logits/rejected": -0.2615441083908081,
+ "logps/chosen": -194.3697052001953,
+ "logps/rejected": -199.2198486328125,
+ "loss": 0.5876,
+ "rewards/accuracies": 0.692187488079071,
+ "rewards/chosen": -0.15525513887405396,
+ "rewards/margins": 0.46965569257736206,
+ "rewards/rejected": -0.624910831451416,
+ "step": 710
+ },
+ {
+ "epoch": 0.4951218463918855,
+ "grad_norm": 25.35095914324955,
+ "learning_rate": 5.622612681436211e-07,
+ "logits/chosen": -0.28218579292297363,
+ "logits/rejected": -0.24854585528373718,
+ "logps/chosen": -194.7863006591797,
+ "logps/rejected": -206.6715087890625,
+ "loss": 0.5791,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.16084975004196167,
+ "rewards/margins": 0.49476325511932373,
+ "rewards/rejected": -0.6556130647659302,
+ "step": 720
+ },
+ {
+ "epoch": 0.5019985387028839,
+ "grad_norm": 22.80592965472249,
+ "learning_rate": 5.546218487394958e-07,
+ "logits/chosen": -0.2643795609474182,
+ "logits/rejected": -0.23982766270637512,
+ "logps/chosen": -207.25701904296875,
+ "logps/rejected": -213.31893920898438,
+ "loss": 0.7215,
+ "rewards/accuracies": 0.6953125,
+ "rewards/chosen": -0.31397566199302673,
+ "rewards/margins": 0.3858318328857422,
+ "rewards/rejected": -0.6998074650764465,
+ "step": 730
+ },
+ {
+ "epoch": 0.5088752310138823,
+ "grad_norm": 21.365270293353195,
+ "learning_rate": 5.469824293353705e-07,
+ "logits/chosen": -0.3060339391231537,
+ "logits/rejected": -0.27944815158843994,
+ "logps/chosen": -199.2218780517578,
+ "logps/rejected": -207.60980224609375,
+ "loss": 0.5916,
+ "rewards/accuracies": 0.6875,
+ "rewards/chosen": -0.21099844574928284,
+ "rewards/margins": 0.4499368667602539,
+ "rewards/rejected": -0.6609352827072144,
+ "step": 740
+ },
+ {
+ "epoch": 0.5157519233248807,
+ "grad_norm": 24.66364313570442,
+ "learning_rate": 5.393430099312452e-07,
+ "logits/chosen": -0.25179368257522583,
+ "logits/rejected": -0.2336534559726715,
+ "logps/chosen": -195.29815673828125,
+ "logps/rejected": -199.34864807128906,
+ "loss": 0.6025,
+ "rewards/accuracies": 0.671875,
+ "rewards/chosen": -0.22294992208480835,
+ "rewards/margins": 0.449633926153183,
+ "rewards/rejected": -0.6725838780403137,
+ "step": 750
+ },
+ {
+ "epoch": 0.5226286156358791,
+ "grad_norm": 21.696519274645965,
+ "learning_rate": 5.317035905271199e-07,
+ "logits/chosen": -0.23188501596450806,
+ "logits/rejected": -0.18740838766098022,
+ "logps/chosen": -194.13967895507812,
+ "logps/rejected": -205.78663635253906,
+ "loss": 0.5554,
+ "rewards/accuracies": 0.7125000357627869,
+ "rewards/chosen": -0.19764624536037445,
+ "rewards/margins": 0.5703191161155701,
+ "rewards/rejected": -0.7679654359817505,
+ "step": 760
+ },
+ {
+ "epoch": 0.5295053079468776,
+ "grad_norm": 19.823323655353022,
+ "learning_rate": 5.240641711229946e-07,
+ "logits/chosen": -0.22654050588607788,
+ "logits/rejected": -0.21553871035575867,
+ "logps/chosen": -195.4784698486328,
+ "logps/rejected": -201.73641967773438,
+ "loss": 0.6091,
+ "rewards/accuracies": 0.6609375476837158,
+ "rewards/chosen": -0.24171432852745056,
+ "rewards/margins": 0.4162150025367737,
+ "rewards/rejected": -0.6579293012619019,
+ "step": 770
+ },
+ {
+ "epoch": 0.536382000257876,
+ "grad_norm": 27.677161536481233,
+ "learning_rate": 5.164247517188694e-07,
+ "logits/chosen": -0.25750795006752014,
+ "logits/rejected": -0.21803677082061768,
+ "logps/chosen": -189.33285522460938,
+ "logps/rejected": -200.5758056640625,
+ "loss": 0.5884,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.2196999341249466,
+ "rewards/margins": 0.5162926316261292,
+ "rewards/rejected": -0.7359925508499146,
+ "step": 780
+ },
+ {
+ "epoch": 0.5432586925688744,
+ "grad_norm": 23.869787766709965,
+ "learning_rate": 5.08785332314744e-07,
+ "logits/chosen": -0.22552648186683655,
+ "logits/rejected": -0.2141103446483612,
+ "logps/chosen": -198.36676025390625,
+ "logps/rejected": -209.24111938476562,
+ "loss": 0.5752,
+ "rewards/accuracies": 0.715624988079071,
+ "rewards/chosen": -0.22112296521663666,
+ "rewards/margins": 0.5205415487289429,
+ "rewards/rejected": -0.7416645288467407,
+ "step": 790
+ },
+ {
+ "epoch": 0.5501353848798728,
+ "grad_norm": 21.442740342884818,
+ "learning_rate": 5.011459129106188e-07,
+ "logits/chosen": -0.24971872568130493,
+ "logits/rejected": -0.23532649874687195,
+ "logps/chosen": -195.80618286132812,
+ "logps/rejected": -204.13272094726562,
+ "loss": 0.5837,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.21100682020187378,
+ "rewards/margins": 0.48472172021865845,
+ "rewards/rejected": -0.695728600025177,
+ "step": 800
+ },
1212
+ {
1213
+ "epoch": 0.5570120771908712,
1214
+ "grad_norm": 23.649465420938917,
1215
+ "learning_rate": 4.935064935064935e-07,
1216
+ "logits/chosen": -0.20772293210029602,
1217
+ "logits/rejected": -0.1990753710269928,
1218
+ "logps/chosen": -190.77322387695312,
1219
+ "logps/rejected": -201.27334594726562,
1220
+ "loss": 0.5731,
1221
+ "rewards/accuracies": 0.684374988079071,
1222
+ "rewards/chosen": -0.26777908205986023,
1223
+ "rewards/margins": 0.5119359493255615,
1224
+ "rewards/rejected": -0.7797149419784546,
1225
+ "step": 810
1226
+ },
1227
+ {
1228
+ "epoch": 0.5638887695018696,
1229
+ "grad_norm": 20.834632432860804,
1230
+ "learning_rate": 4.858670741023682e-07,
1231
+ "logits/chosen": -0.23629331588745117,
1232
+ "logits/rejected": -0.21014532446861267,
1233
+ "logps/chosen": -194.09735107421875,
1234
+ "logps/rejected": -204.4751434326172,
1235
+ "loss": 0.5683,
1236
+ "rewards/accuracies": 0.7140624523162842,
1237
+ "rewards/chosen": -0.2610727548599243,
1238
+ "rewards/margins": 0.5246341824531555,
1239
+ "rewards/rejected": -0.7857069373130798,
1240
+ "step": 820
1241
+ },
1242
+ {
1243
+ "epoch": 0.570765461812868,
1244
+ "grad_norm": 21.895485023628986,
1245
+ "learning_rate": 4.782276546982429e-07,
1246
+ "logits/chosen": -0.21753302216529846,
1247
+ "logits/rejected": -0.2036309391260147,
1248
+ "logps/chosen": -202.1611328125,
1249
+ "logps/rejected": -211.5482635498047,
1250
+ "loss": 0.5698,
1251
+ "rewards/accuracies": 0.698437511920929,
1252
+ "rewards/chosen": -0.25107091665267944,
1253
+ "rewards/margins": 0.5232045650482178,
1254
+ "rewards/rejected": -0.774275541305542,
1255
+ "step": 830
1256
+ },
1257
+ {
1258
+ "epoch": 0.5776421541238664,
1259
+ "grad_norm": 21.443223828952902,
1260
+ "learning_rate": 4.705882352941176e-07,
1261
+ "logits/chosen": -0.2529454529285431,
1262
+ "logits/rejected": -0.2238178700208664,
1263
+ "logps/chosen": -196.12887573242188,
1264
+ "logps/rejected": -207.17364501953125,
1265
+ "loss": 0.5546,
1266
+ "rewards/accuracies": 0.729687511920929,
1267
+ "rewards/chosen": -0.22495540976524353,
1268
+ "rewards/margins": 0.6076815128326416,
1269
+ "rewards/rejected": -0.8326369524002075,
1270
+ "step": 840
1271
+ },
1272
+ {
1273
+ "epoch": 0.5845188464348648,
1274
+ "grad_norm": 21.225876530865918,
1275
+ "learning_rate": 4.6294881588999233e-07,
1276
+ "logits/chosen": -0.28026488423347473,
1277
+ "logits/rejected": -0.266289621591568,
1278
+ "logps/chosen": -201.97750854492188,
1279
+ "logps/rejected": -212.987548828125,
1280
+ "loss": 0.5735,
1281
+ "rewards/accuracies": 0.7265625,
1282
+ "rewards/chosen": -0.26331812143325806,
1283
+ "rewards/margins": 0.582595705986023,
1284
+ "rewards/rejected": -0.8459138870239258,
1285
+ "step": 850
1286
+ },
1287
+ {
+ "epoch": 0.5913955387458633,
+ "grad_norm": 28.249065086967942,
+ "learning_rate": 4.553093964858671e-07,
+ "logits/chosen": -0.2620088458061218,
+ "logits/rejected": -0.23113124072551727,
+ "logps/chosen": -196.2758331298828,
+ "logps/rejected": -200.11190795898438,
+ "loss": 0.5697,
+ "rewards/accuracies": 0.706250011920929,
+ "rewards/chosen": -0.2763211727142334,
+ "rewards/margins": 0.5339369177818298,
+ "rewards/rejected": -0.8102580904960632,
+ "step": 860
+ },
+ {
+ "epoch": 0.5982722310568617,
+ "grad_norm": 19.798351649647564,
+ "learning_rate": 4.4766997708174176e-07,
+ "logits/chosen": -0.29130977392196655,
+ "logits/rejected": -0.2608138918876648,
+ "logps/chosen": -205.44146728515625,
+ "logps/rejected": -213.7480926513672,
+ "loss": 0.5685,
+ "rewards/accuracies": 0.7234375476837158,
+ "rewards/chosen": -0.26992180943489075,
+ "rewards/margins": 0.5605109930038452,
+ "rewards/rejected": -0.8304328322410583,
+ "step": 870
+ },
+ {
+ "epoch": 0.6051489233678601,
+ "grad_norm": 20.78214558835817,
+ "learning_rate": 4.4003055767761647e-07,
+ "logits/chosen": -0.2577701508998871,
+ "logits/rejected": -0.23245316743850708,
+ "logps/chosen": -198.88690185546875,
+ "logps/rejected": -205.05946350097656,
+ "loss": 0.5899,
+ "rewards/accuracies": 0.6812500357627869,
+ "rewards/chosen": -0.27420300245285034,
+ "rewards/margins": 0.5197823643684387,
+ "rewards/rejected": -0.7939853668212891,
+ "step": 880
+ },
+ {
+ "epoch": 0.6120256156788585,
+ "grad_norm": 22.312291249160385,
+ "learning_rate": 4.3239113827349124e-07,
+ "logits/chosen": -0.2274932563304901,
+ "logits/rejected": -0.21958866715431213,
+ "logps/chosen": -204.72377014160156,
+ "logps/rejected": -209.85736083984375,
+ "loss": 0.5723,
+ "rewards/accuracies": 0.715624988079071,
+ "rewards/chosen": -0.25001946091651917,
+ "rewards/margins": 0.5583000183105469,
+ "rewards/rejected": -0.8083194494247437,
+ "step": 890
+ },
+ {
+ "epoch": 0.6189023079898569,
+ "grad_norm": 25.49630333289921,
+ "learning_rate": 4.247517188693659e-07,
+ "logits/chosen": -0.24299953877925873,
+ "logits/rejected": -0.2256893813610077,
+ "logps/chosen": -208.52427673339844,
+ "logps/rejected": -210.1595001220703,
+ "loss": 0.5815,
+ "rewards/accuracies": 0.7140624523162842,
+ "rewards/chosen": -0.2674937844276428,
+ "rewards/margins": 0.528352677822113,
+ "rewards/rejected": -0.7958465218544006,
+ "step": 900
+ },
+ {
+ "epoch": 0.6257790003008553,
+ "grad_norm": 20.53779533869322,
+ "learning_rate": 4.171122994652406e-07,
+ "logits/chosen": -0.248798668384552,
+ "logits/rejected": -0.23299312591552734,
+ "logps/chosen": -202.05685424804688,
+ "logps/rejected": -212.53282165527344,
+ "loss": 0.5716,
+ "rewards/accuracies": 0.690625011920929,
+ "rewards/chosen": -0.23762881755828857,
+ "rewards/margins": 0.5192852020263672,
+ "rewards/rejected": -0.7569140195846558,
+ "step": 910
+ },
+ {
+ "epoch": 0.6326556926118537,
+ "grad_norm": 19.08129134756091,
+ "learning_rate": 4.094728800611154e-07,
+ "logits/chosen": -0.26804298162460327,
+ "logits/rejected": -0.2515054941177368,
+ "logps/chosen": -193.53302001953125,
+ "logps/rejected": -206.30044555664062,
+ "loss": 0.5365,
+ "rewards/accuracies": 0.734375,
+ "rewards/chosen": -0.14372526109218597,
+ "rewards/margins": 0.630579948425293,
+ "rewards/rejected": -0.7743052244186401,
+ "step": 920
+ },
+ {
+ "epoch": 0.6395323849228521,
+ "grad_norm": 18.703547962812074,
+ "learning_rate": 4.0183346065699004e-07,
+ "logits/chosen": -0.28248900175094604,
+ "logits/rejected": -0.2462625950574875,
+ "logps/chosen": -198.85198974609375,
+ "logps/rejected": -207.67355346679688,
+ "loss": 0.532,
+ "rewards/accuracies": 0.765625,
+ "rewards/chosen": -0.16078238189220428,
+ "rewards/margins": 0.641302227973938,
+ "rewards/rejected": -0.8020846247673035,
+ "step": 930
+ },
+ {
+ "epoch": 0.6464090772338505,
+ "grad_norm": 20.41142350877169,
+ "learning_rate": 3.9419404125286475e-07,
+ "logits/chosen": -0.2985166907310486,
+ "logits/rejected": -0.24812492728233337,
+ "logps/chosen": -198.37818908691406,
+ "logps/rejected": -212.2599334716797,
+ "loss": 0.5656,
+ "rewards/accuracies": 0.7203125357627869,
+ "rewards/chosen": -0.16358602046966553,
+ "rewards/margins": 0.5863920450210571,
+ "rewards/rejected": -0.7499780058860779,
+ "step": 940
+ },
+ {
+ "epoch": 0.653285769544849,
+ "grad_norm": 21.785705703616344,
+ "learning_rate": 3.865546218487395e-07,
+ "logits/chosen": -0.2526381015777588,
+ "logits/rejected": -0.22008472681045532,
+ "logps/chosen": -204.68231201171875,
+ "logps/rejected": -218.895751953125,
+ "loss": 0.5528,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.3044445514678955,
+ "rewards/margins": 0.6037268042564392,
+ "rewards/rejected": -0.9081714153289795,
+ "step": 950
+ },
+ {
+ "epoch": 0.6601624618558474,
+ "grad_norm": 22.594441167295546,
+ "learning_rate": 3.789152024446142e-07,
+ "logits/chosen": -0.29777246713638306,
+ "logits/rejected": -0.25746557116508484,
+ "logps/chosen": -195.57608032226562,
+ "logps/rejected": -209.7220001220703,
+ "loss": 0.5699,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.279017835855484,
+ "rewards/margins": 0.5582146644592285,
+ "rewards/rejected": -0.8372325301170349,
+ "step": 960
+ },
+ {
+ "epoch": 0.6670391541668458,
+ "grad_norm": 21.978835959015736,
+ "learning_rate": 3.712757830404889e-07,
+ "logits/chosen": -0.23148207366466522,
+ "logits/rejected": -0.21888744831085205,
+ "logps/chosen": -205.72726440429688,
+ "logps/rejected": -216.00201416015625,
+ "loss": 0.5486,
+ "rewards/accuracies": 0.7187500596046448,
+ "rewards/chosen": -0.26756638288497925,
+ "rewards/margins": 0.6475085616111755,
+ "rewards/rejected": -0.91507488489151,
+ "step": 970
+ },
+ {
+ "epoch": 0.6739158464778442,
+ "grad_norm": 22.873802825567996,
+ "learning_rate": 3.636363636363636e-07,
+ "logits/chosen": -0.27247849106788635,
+ "logits/rejected": -0.25619280338287354,
+ "logps/chosen": -194.8944091796875,
+ "logps/rejected": -203.31651306152344,
+ "loss": 0.575,
+ "rewards/accuracies": 0.7078125476837158,
+ "rewards/chosen": -0.3309789299964905,
+ "rewards/margins": 0.5653645396232605,
+ "rewards/rejected": -0.8963434100151062,
+ "step": 980
+ },
+ {
+ "epoch": 0.6807925387888426,
+ "grad_norm": 20.94553991910371,
+ "learning_rate": 3.5599694423223837e-07,
+ "logits/chosen": -0.25270241498947144,
+ "logits/rejected": -0.22576749324798584,
+ "logps/chosen": -193.91845703125,
+ "logps/rejected": -203.6450653076172,
+ "loss": 0.5489,
+ "rewards/accuracies": 0.714062511920929,
+ "rewards/chosen": -0.29697081446647644,
+ "rewards/margins": 0.5997652411460876,
+ "rewards/rejected": -0.8967360258102417,
+ "step": 990
+ },
+ {
+ "epoch": 0.687669231099841,
+ "grad_norm": 22.920934792482523,
+ "learning_rate": 3.4835752482811303e-07,
+ "logits/chosen": -0.32494282722473145,
+ "logits/rejected": -0.3064189851284027,
+ "logps/chosen": -200.22879028320312,
+ "logps/rejected": -213.04481506347656,
+ "loss": 0.5568,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.30442699790000916,
+ "rewards/margins": 0.6327985525131226,
+ "rewards/rejected": -0.9372255802154541,
+ "step": 1000
+ },
+ {
+ "epoch": 0.6945459234108394,
+ "grad_norm": 23.631951442333467,
+ "learning_rate": 3.4071810542398775e-07,
+ "logits/chosen": -0.3100181221961975,
+ "logits/rejected": -0.286727637052536,
+ "logps/chosen": -202.29776000976562,
+ "logps/rejected": -211.05247497558594,
+ "loss": 0.5783,
+ "rewards/accuracies": 0.7125000357627869,
+ "rewards/chosen": -0.36704209446907043,
+ "rewards/margins": 0.5526846051216125,
+ "rewards/rejected": -0.9197267293930054,
+ "step": 1010
+ },
+ {
+ "epoch": 0.7014226157218378,
+ "grad_norm": 18.747830881340672,
+ "learning_rate": 3.330786860198625e-07,
+ "logits/chosen": -0.27798181772232056,
+ "logits/rejected": -0.21886159479618073,
+ "logps/chosen": -201.37747192382812,
+ "logps/rejected": -218.2301025390625,
+ "loss": 0.515,
+ "rewards/accuracies": 0.7718750238418579,
+ "rewards/chosen": -0.24704203009605408,
+ "rewards/margins": 0.7255524396896362,
+ "rewards/rejected": -0.9725943803787231,
+ "step": 1020
+ },
+ {
+ "epoch": 0.7082993080328363,
+ "grad_norm": 24.25409543649107,
+ "learning_rate": 3.2543926661573717e-07,
+ "logits/chosen": -0.3029528856277466,
+ "logits/rejected": -0.27629080414772034,
+ "logps/chosen": -193.83238220214844,
+ "logps/rejected": -203.4297332763672,
+ "loss": 0.5566,
+ "rewards/accuracies": 0.6984374523162842,
+ "rewards/chosen": -0.3451562523841858,
+ "rewards/margins": 0.6299095749855042,
+ "rewards/rejected": -0.9750658869743347,
+ "step": 1030
+ },
+ {
+ "epoch": 0.7151760003438347,
+ "grad_norm": 26.97218970153312,
+ "learning_rate": 3.177998472116119e-07,
+ "logits/chosen": -0.29127955436706543,
+ "logits/rejected": -0.25185728073120117,
+ "logps/chosen": -204.32371520996094,
+ "logps/rejected": -221.5450439453125,
+ "loss": 0.5525,
+ "rewards/accuracies": 0.7218750715255737,
+ "rewards/chosen": -0.2525748312473297,
+ "rewards/margins": 0.608326256275177,
+ "rewards/rejected": -0.8609010577201843,
+ "step": 1040
+ },
+ {
+ "epoch": 0.7220526926548331,
+ "grad_norm": 22.202001277198995,
+ "learning_rate": 3.1016042780748665e-07,
+ "logits/chosen": -0.27952516078948975,
+ "logits/rejected": -0.2715262174606323,
+ "logps/chosen": -195.66201782226562,
+ "logps/rejected": -200.53121948242188,
+ "loss": 0.5688,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.30520564317703247,
+ "rewards/margins": 0.5800539255142212,
+ "rewards/rejected": -0.8852595090866089,
+ "step": 1050
+ },
+ {
+ "epoch": 0.7289293849658315,
+ "grad_norm": 23.68851239947594,
+ "learning_rate": 3.025210084033613e-07,
+ "logits/chosen": -0.3042244613170624,
+ "logits/rejected": -0.26939964294433594,
+ "logps/chosen": -205.25555419921875,
+ "logps/rejected": -215.33792114257812,
+ "loss": 0.5763,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.3437601327896118,
+ "rewards/margins": 0.648766815662384,
+ "rewards/rejected": -0.9925269484519958,
+ "step": 1060
+ },
+ {
+ "epoch": 0.7358060772768299,
+ "grad_norm": 16.770485849221966,
+ "learning_rate": 2.9488158899923603e-07,
+ "logits/chosen": -0.29478147625923157,
+ "logits/rejected": -0.26331159472465515,
+ "logps/chosen": -191.68682861328125,
+ "logps/rejected": -205.52398681640625,
+ "loss": 0.5514,
+ "rewards/accuracies": 0.721875011920929,
+ "rewards/chosen": -0.22186440229415894,
+ "rewards/margins": 0.6213081479072571,
+ "rewards/rejected": -0.843172550201416,
+ "step": 1070
+ },
+ {
+ "epoch": 0.7426827695878283,
+ "grad_norm": 21.974491833687136,
+ "learning_rate": 2.872421695951108e-07,
+ "logits/chosen": -0.25364449620246887,
+ "logits/rejected": -0.21501508355140686,
+ "logps/chosen": -185.8372802734375,
+ "logps/rejected": -200.8142547607422,
+ "loss": 0.57,
+ "rewards/accuracies": 0.71875,
+ "rewards/chosen": -0.24050012230873108,
+ "rewards/margins": 0.6280645728111267,
+ "rewards/rejected": -0.8685646653175354,
+ "step": 1080
+ },
+ {
+ "epoch": 0.7495594618988267,
+ "grad_norm": 25.733854858297498,
+ "learning_rate": 2.7960275019098545e-07,
+ "logits/chosen": -0.23827332258224487,
+ "logits/rejected": -0.20885096490383148,
+ "logps/chosen": -203.6381072998047,
+ "logps/rejected": -218.4065704345703,
+ "loss": 0.5452,
+ "rewards/accuracies": 0.7000000476837158,
+ "rewards/chosen": -0.30213844776153564,
+ "rewards/margins": 0.638514518737793,
+ "rewards/rejected": -0.9406529664993286,
+ "step": 1090
+ },
+ {
+ "epoch": 0.7564361542098251,
+ "grad_norm": 20.372146785031376,
+ "learning_rate": 2.7196333078686017e-07,
+ "logits/chosen": -0.2448071539402008,
+ "logits/rejected": -0.2028975635766983,
+ "logps/chosen": -200.01614379882812,
+ "logps/rejected": -211.32534790039062,
+ "loss": 0.545,
+ "rewards/accuracies": 0.745312511920929,
+ "rewards/chosen": -0.30321523547172546,
+ "rewards/margins": 0.6893056631088257,
+ "rewards/rejected": -0.992520809173584,
+ "step": 1100
+ },
+ {
+ "epoch": 0.7633128465208234,
+ "grad_norm": 20.430379953555082,
+ "learning_rate": 2.6432391138273493e-07,
+ "logits/chosen": -0.2551287114620209,
+ "logits/rejected": -0.23339179158210754,
+ "logps/chosen": -199.39683532714844,
+ "logps/rejected": -209.9544219970703,
+ "loss": 0.5584,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.3356574773788452,
+ "rewards/margins": 0.6131808161735535,
+ "rewards/rejected": -0.9488382339477539,
+ "step": 1110
+ },
+ {
+ "epoch": 0.7701895388318218,
+ "grad_norm": 19.901880078755248,
+ "learning_rate": 2.5668449197860965e-07,
+ "logits/chosen": -0.22213828563690186,
+ "logits/rejected": -0.19397185742855072,
+ "logps/chosen": -199.30223083496094,
+ "logps/rejected": -207.69515991210938,
+ "loss": 0.5507,
+ "rewards/accuracies": 0.7468750476837158,
+ "rewards/chosen": -0.44687339663505554,
+ "rewards/margins": 0.6169517040252686,
+ "rewards/rejected": -1.0638251304626465,
+ "step": 1120
+ },
+ {
+ "epoch": 0.7770662311428203,
+ "grad_norm": 20.24722363911535,
+ "learning_rate": 2.490450725744843e-07,
+ "logits/chosen": -0.2590638995170593,
+ "logits/rejected": -0.22945846617221832,
+ "logps/chosen": -200.342041015625,
+ "logps/rejected": -212.2578582763672,
+ "loss": 0.5454,
+ "rewards/accuracies": 0.7421875596046448,
+ "rewards/chosen": -0.33622223138809204,
+ "rewards/margins": 0.6579465270042419,
+ "rewards/rejected": -0.994168758392334,
+ "step": 1130
+ },
+ {
+ "epoch": 0.7839429234538187,
+ "grad_norm": 21.086214479883942,
+ "learning_rate": 2.41405653170359e-07,
+ "logits/chosen": -0.25024640560150146,
+ "logits/rejected": -0.21352523565292358,
+ "logps/chosen": -200.02767944335938,
+ "logps/rejected": -210.52069091796875,
+ "loss": 0.5633,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.37010401487350464,
+ "rewards/margins": 0.6089712381362915,
+ "rewards/rejected": -0.9790753126144409,
+ "step": 1140
+ },
+ {
+ "epoch": 0.7908196157648171,
+ "grad_norm": 19.875952183889193,
+ "learning_rate": 2.3376623376623376e-07,
+ "logits/chosen": -0.25371620059013367,
+ "logits/rejected": -0.2263481318950653,
+ "logps/chosen": -207.67349243164062,
+ "logps/rejected": -220.63577270507812,
+ "loss": 0.549,
+ "rewards/accuracies": 0.739062488079071,
+ "rewards/chosen": -0.3151938021183014,
+ "rewards/margins": 0.6516591310501099,
+ "rewards/rejected": -0.9668529033660889,
+ "step": 1150
+ },
+ {
+ "epoch": 0.7976963080758155,
+ "grad_norm": 21.407536980595168,
+ "learning_rate": 2.2612681436210848e-07,
+ "logits/chosen": -0.2808507978916168,
+ "logits/rejected": -0.24474851787090302,
+ "logps/chosen": -198.8170166015625,
+ "logps/rejected": -210.86314392089844,
+ "loss": 0.5541,
+ "rewards/accuracies": 0.714062511920929,
+ "rewards/chosen": -0.32873424887657166,
+ "rewards/margins": 0.6663525700569153,
+ "rewards/rejected": -0.9950867891311646,
+ "step": 1160
+ },
+ {
+ "epoch": 0.8045730003868139,
+ "grad_norm": 23.50770080300074,
+ "learning_rate": 2.184873949579832e-07,
+ "logits/chosen": -0.30061495304107666,
+ "logits/rejected": -0.26357635855674744,
+ "logps/chosen": -199.1419677734375,
+ "logps/rejected": -214.4768829345703,
+ "loss": 0.5306,
+ "rewards/accuracies": 0.737500011920929,
+ "rewards/chosen": -0.282147616147995,
+ "rewards/margins": 0.6803615093231201,
+ "rewards/rejected": -0.9625092148780823,
+ "step": 1170
+ },
+ {
+ "epoch": 0.8114496926978123,
+ "grad_norm": 24.155885332906,
+ "learning_rate": 2.1084797555385788e-07,
+ "logits/chosen": -0.24145373702049255,
+ "logits/rejected": -0.20655398070812225,
+ "logps/chosen": -206.26055908203125,
+ "logps/rejected": -213.5579833984375,
+ "loss": 0.5633,
+ "rewards/accuracies": 0.7171875238418579,
+ "rewards/chosen": -0.38221338391304016,
+ "rewards/margins": 0.585527777671814,
+ "rewards/rejected": -0.967741072177887,
+ "step": 1180
+ },
+ {
+ "epoch": 0.8183263850088107,
+ "grad_norm": 18.461701909835007,
+ "learning_rate": 2.0320855614973262e-07,
+ "logits/chosen": -0.29989033937454224,
+ "logits/rejected": -0.2700657546520233,
+ "logps/chosen": -191.77200317382812,
+ "logps/rejected": -204.35777282714844,
+ "loss": 0.5521,
+ "rewards/accuracies": 0.721875011920929,
+ "rewards/chosen": -0.32683059573173523,
+ "rewards/margins": 0.6650609970092773,
+ "rewards/rejected": -0.991891622543335,
+ "step": 1190
+ },
+ {
+ "epoch": 0.8252030773198091,
+ "grad_norm": 22.37257340571811,
+ "learning_rate": 1.9556913674560733e-07,
+ "logits/chosen": -0.2594352662563324,
+ "logits/rejected": -0.23618939518928528,
+ "logps/chosen": -200.5668487548828,
+ "logps/rejected": -210.00950622558594,
+ "loss": 0.5702,
+ "rewards/accuracies": 0.7093750238418579,
+ "rewards/chosen": -0.31345993280410767,
+ "rewards/margins": 0.6197506785392761,
+ "rewards/rejected": -0.9332106113433838,
+ "step": 1200
+ },
+ {
+ "epoch": 0.8320797696308075,
+ "grad_norm": 25.224789610569076,
+ "learning_rate": 1.8792971734148204e-07,
+ "logits/chosen": -0.2822037935256958,
+ "logits/rejected": -0.24582624435424805,
+ "logps/chosen": -198.5158233642578,
+ "logps/rejected": -210.27737426757812,
+ "loss": 0.543,
+ "rewards/accuracies": 0.7593749761581421,
+ "rewards/chosen": -0.37239497900009155,
+ "rewards/margins": 0.6797323822975159,
+ "rewards/rejected": -1.0521273612976074,
+ "step": 1210
+ },
+ {
+ "epoch": 0.838956461941806,
+ "grad_norm": 19.780304476647146,
+ "learning_rate": 1.8029029793735676e-07,
+ "logits/chosen": -0.26471930742263794,
+ "logits/rejected": -0.22956611216068268,
+ "logps/chosen": -197.03468322753906,
+ "logps/rejected": -215.57901000976562,
+ "loss": 0.5449,
+ "rewards/accuracies": 0.739062488079071,
+ "rewards/chosen": -0.34902551770210266,
+ "rewards/margins": 0.6907632350921631,
+ "rewards/rejected": -1.039788842201233,
+ "step": 1220
+ },
+ {
+ "epoch": 0.8458331542528044,
+ "grad_norm": 16.98463514772764,
+ "learning_rate": 1.7265087853323147e-07,
+ "logits/chosen": -0.2907296419143677,
+ "logits/rejected": -0.26648983359336853,
+ "logps/chosen": -199.41156005859375,
+ "logps/rejected": -214.23023986816406,
+ "loss": 0.535,
+ "rewards/accuracies": 0.7484375238418579,
+ "rewards/chosen": -0.3681446313858032,
+ "rewards/margins": 0.6866059899330139,
+ "rewards/rejected": -1.0547505617141724,
+ "step": 1230
+ },
+ {
+ "epoch": 0.8527098465638028,
+ "grad_norm": 32.69373101278549,
+ "learning_rate": 1.6501145912910618e-07,
+ "logits/chosen": -0.26771843433380127,
+ "logits/rejected": -0.2375665307044983,
+ "logps/chosen": -201.06895446777344,
+ "logps/rejected": -210.51840209960938,
+ "loss": 0.5723,
+ "rewards/accuracies": 0.7109375596046448,
+ "rewards/chosen": -0.38859817385673523,
+ "rewards/margins": 0.6455494165420532,
+ "rewards/rejected": -1.0341476202011108,
+ "step": 1240
+ },
+ {
+ "epoch": 0.8595865388748012,
+ "grad_norm": 20.83949759552536,
+ "learning_rate": 1.573720397249809e-07,
+ "logits/chosen": -0.2752777934074402,
+ "logits/rejected": -0.24270716309547424,
+ "logps/chosen": -200.675537109375,
+ "logps/rejected": -212.04931640625,
+ "loss": 0.5395,
+ "rewards/accuracies": 0.745312511920929,
+ "rewards/chosen": -0.3316209316253662,
+ "rewards/margins": 0.6646089553833008,
+ "rewards/rejected": -0.996229887008667,
+ "step": 1250
+ },
+ {
+ "epoch": 0.8664632311857996,
+ "grad_norm": 24.573642614002804,
+ "learning_rate": 1.4973262032085558e-07,
+ "logits/chosen": -0.2565278112888336,
+ "logits/rejected": -0.21442291140556335,
+ "logps/chosen": -194.88111877441406,
+ "logps/rejected": -210.5688018798828,
+ "loss": 0.5582,
+ "rewards/accuracies": 0.7328125238418579,
+ "rewards/chosen": -0.44270479679107666,
+ "rewards/margins": 0.6293411254882812,
+ "rewards/rejected": -1.0720458030700684,
+ "step": 1260
+ },
+ {
+ "epoch": 0.873339923496798,
+ "grad_norm": 19.534250584569072,
+ "learning_rate": 1.4209320091673032e-07,
+ "logits/chosen": -0.2922959327697754,
+ "logits/rejected": -0.26131224632263184,
+ "logps/chosen": -194.8395538330078,
+ "logps/rejected": -209.4702911376953,
+ "loss": 0.5413,
+ "rewards/accuracies": 0.7421875,
+ "rewards/chosen": -0.33789727091789246,
+ "rewards/margins": 0.6527694463729858,
+ "rewards/rejected": -0.9906667470932007,
+ "step": 1270
+ },
+ {
+ "epoch": 0.8802166158077964,
+ "grad_norm": 20.717698242318562,
+ "learning_rate": 1.3445378151260504e-07,
+ "logits/chosen": -0.2540634274482727,
+ "logits/rejected": -0.22778739035129547,
+ "logps/chosen": -203.4353790283203,
+ "logps/rejected": -211.83343505859375,
+ "loss": 0.5304,
+ "rewards/accuracies": 0.7390625476837158,
+ "rewards/chosen": -0.3961109519004822,
+ "rewards/margins": 0.6780579686164856,
+ "rewards/rejected": -1.0741689205169678,
+ "step": 1280
+ },
+ {
+ "epoch": 0.8870933081187948,
+ "grad_norm": 23.96725955714651,
+ "learning_rate": 1.2681436210847975e-07,
+ "logits/chosen": -0.25593337416648865,
+ "logits/rejected": -0.21139128506183624,
+ "logps/chosen": -192.81637573242188,
+ "logps/rejected": -207.94528198242188,
+ "loss": 0.5305,
+ "rewards/accuracies": 0.7437500357627869,
+ "rewards/chosen": -0.3916109502315521,
+ "rewards/margins": 0.7024664878845215,
+ "rewards/rejected": -1.0940773487091064,
+ "step": 1290
+ },
+ {
+ "epoch": 0.8939700004297932,
+ "grad_norm": 21.318918224883497,
+ "learning_rate": 1.1917494270435446e-07,
+ "logits/chosen": -0.2688959538936615,
+ "logits/rejected": -0.256546288728714,
+ "logps/chosen": -194.7349395751953,
+ "logps/rejected": -208.1248016357422,
+ "loss": 0.5503,
+ "rewards/accuracies": 0.7234375476837158,
+ "rewards/chosen": -0.41377341747283936,
+ "rewards/margins": 0.6503176689147949,
+ "rewards/rejected": -1.0640910863876343,
+ "step": 1300
+ },
+ {
+ "epoch": 0.9008466927407917,
+ "grad_norm": 22.656364712253406,
+ "learning_rate": 1.1153552330022918e-07,
+ "logits/chosen": -0.27973228693008423,
+ "logits/rejected": -0.24073663353919983,
+ "logps/chosen": -197.87957763671875,
+ "logps/rejected": -209.244873046875,
+ "loss": 0.5439,
+ "rewards/accuracies": 0.721875011920929,
+ "rewards/chosen": -0.38405442237854004,
+ "rewards/margins": 0.6377519369125366,
+ "rewards/rejected": -1.0218064785003662,
+ "step": 1310
+ },
+ {
+ "epoch": 0.9077233850517901,
+ "grad_norm": 23.992301612667553,
+ "learning_rate": 1.038961038961039e-07,
+ "logits/chosen": -0.248391255736351,
+ "logits/rejected": -0.22697040438652039,
+ "logps/chosen": -200.25355529785156,
+ "logps/rejected": -216.67298889160156,
+ "loss": 0.5547,
+ "rewards/accuracies": 0.7437499761581421,
+ "rewards/chosen": -0.36668360233306885,
+ "rewards/margins": 0.6348526477813721,
+ "rewards/rejected": -1.001536250114441,
+ "step": 1320
+ },
+ {
+ "epoch": 0.9146000773627885,
+ "grad_norm": 23.080807444085426,
+ "learning_rate": 9.62566844919786e-08,
+ "logits/chosen": -0.23448041081428528,
+ "logits/rejected": -0.20045128464698792,
+ "logps/chosen": -200.07498168945312,
+ "logps/rejected": -213.40365600585938,
+ "loss": 0.5265,
+ "rewards/accuracies": 0.7484375238418579,
+ "rewards/chosen": -0.41947588324546814,
+ "rewards/margins": 0.699980616569519,
+ "rewards/rejected": -1.1194565296173096,
+ "step": 1330
+ },
+ {
+ "epoch": 0.9214767696737869,
+ "grad_norm": 27.13497379193061,
+ "learning_rate": 8.861726508785332e-08,
+ "logits/chosen": -0.21386350691318512,
+ "logits/rejected": -0.20405535399913788,
+ "logps/chosen": -207.48922729492188,
+ "logps/rejected": -213.19801330566406,
+ "loss": 0.5739,
+ "rewards/accuracies": 0.7109375596046448,
+ "rewards/chosen": -0.44666141271591187,
+ "rewards/margins": 0.6134124994277954,
+ "rewards/rejected": -1.060073971748352,
+ "step": 1340
+ },
+ {
+ "epoch": 0.9283534619847853,
+ "grad_norm": 26.86309937846046,
+ "learning_rate": 8.097784568372803e-08,
+ "logits/chosen": -0.2825528681278229,
+ "logits/rejected": -0.2349400520324707,
+ "logps/chosen": -213.959716796875,
+ "logps/rejected": -224.0325164794922,
+ "loss": 0.5575,
+ "rewards/accuracies": 0.71875,
+ "rewards/chosen": -0.4003857374191284,
+ "rewards/margins": 0.6982907056808472,
+ "rewards/rejected": -1.0986764430999756,
+ "step": 1350
+ },
+ {
+ "epoch": 0.9352301542957837,
+ "grad_norm": 29.245122481397317,
+ "learning_rate": 7.333842627960276e-08,
+ "logits/chosen": -0.2761003375053406,
+ "logits/rejected": -0.24544045329093933,
+ "logps/chosen": -202.433349609375,
+ "logps/rejected": -217.29464721679688,
+ "loss": 0.5149,
+ "rewards/accuracies": 0.7828125357627869,
+ "rewards/chosen": -0.4048154354095459,
+ "rewards/margins": 0.7579847574234009,
+ "rewards/rejected": -1.1628003120422363,
+ "step": 1360
+ },
+ {
+ "epoch": 0.9421068466067821,
+ "grad_norm": 25.075679763670674,
+ "learning_rate": 6.569900687547746e-08,
+ "logits/chosen": -0.2352394461631775,
+ "logits/rejected": -0.2183229774236679,
+ "logps/chosen": -204.6761474609375,
+ "logps/rejected": -213.91024780273438,
+ "loss": 0.5774,
+ "rewards/accuracies": 0.7015625238418579,
+ "rewards/chosen": -0.40830886363983154,
+ "rewards/margins": 0.6113278865814209,
+ "rewards/rejected": -1.0196367502212524,
+ "step": 1370
+ },
+ {
+ "epoch": 0.9489835389177805,
+ "grad_norm": 20.1424199530658,
+ "learning_rate": 5.805958747135217e-08,
+ "logits/chosen": -0.23637357354164124,
+ "logits/rejected": -0.21443995833396912,
+ "logps/chosen": -196.8209991455078,
+ "logps/rejected": -207.15574645996094,
+ "loss": 0.5551,
+ "rewards/accuracies": 0.723437488079071,
+ "rewards/chosen": -0.4441874027252197,
+ "rewards/margins": 0.6192114353179932,
+ "rewards/rejected": -1.063398838043213,
+ "step": 1380
+ },
+ {
+ "epoch": 0.955860231228779,
+ "grad_norm": 24.76586280035536,
+ "learning_rate": 5.042016806722689e-08,
+ "logits/chosen": -0.3121819496154785,
+ "logits/rejected": -0.27099382877349854,
+ "logps/chosen": -211.52767944335938,
+ "logps/rejected": -222.99720764160156,
+ "loss": 0.5424,
+ "rewards/accuracies": 0.7109375,
+ "rewards/chosen": -0.30979296565055847,
+ "rewards/margins": 0.7096717953681946,
+ "rewards/rejected": -1.0194647312164307,
+ "step": 1390
+ },
+ {
+ "epoch": 0.9627369235397774,
+ "grad_norm": 20.6202462278239,
+ "learning_rate": 4.27807486631016e-08,
+ "logits/chosen": -0.2805480659008026,
+ "logits/rejected": -0.2518741190433502,
+ "logps/chosen": -191.37855529785156,
+ "logps/rejected": -205.9793243408203,
+ "loss": 0.5252,
+ "rewards/accuracies": 0.753125011920929,
+ "rewards/chosen": -0.3638061285018921,
+ "rewards/margins": 0.6892703771591187,
+ "rewards/rejected": -1.0530765056610107,
+ "step": 1400
+ },
+ {
+ "epoch": 0.9696136158507758,
+ "grad_norm": 19.926765783973885,
+ "learning_rate": 3.514132925897632e-08,
+ "logits/chosen": -0.26876330375671387,
+ "logits/rejected": -0.2324240803718567,
+ "logps/chosen": -198.6615447998047,
+ "logps/rejected": -215.54981994628906,
+ "loss": 0.4938,
+ "rewards/accuracies": 0.792187511920929,
+ "rewards/chosen": -0.3588196635246277,
+ "rewards/margins": 0.786273181438446,
+ "rewards/rejected": -1.1450928449630737,
+ "step": 1410
+ },
+ {
+ "epoch": 0.9764903081617742,
+ "grad_norm": 21.09934832903724,
+ "learning_rate": 2.750190985485103e-08,
+ "logits/chosen": -0.21979323029518127,
+ "logits/rejected": -0.21775387227535248,
+ "logps/chosen": -195.7095489501953,
+ "logps/rejected": -205.0386505126953,
+ "loss": 0.5229,
+ "rewards/accuracies": 0.7468750476837158,
+ "rewards/chosen": -0.38191646337509155,
+ "rewards/margins": 0.7250487804412842,
+ "rewards/rejected": -1.1069653034210205,
+ "step": 1420
+ },
+ {
+ "epoch": 0.9833670004727726,
+ "grad_norm": 19.940200333703277,
+ "learning_rate": 1.9862490450725743e-08,
+ "logits/chosen": -0.26562875509262085,
+ "logits/rejected": -0.25526827573776245,
+ "logps/chosen": -199.0478515625,
+ "logps/rejected": -207.9737548828125,
+ "loss": 0.5485,
+ "rewards/accuracies": 0.7250000238418579,
+ "rewards/chosen": -0.4320859909057617,
+ "rewards/margins": 0.6247124671936035,
+ "rewards/rejected": -1.0567984580993652,
+ "step": 1430
+ },
+ {
+ "epoch": 0.990243692783771,
+ "grad_norm": 21.44789284674788,
+ "learning_rate": 1.2223071046600458e-08,
+ "logits/chosen": -0.2715289294719696,
+ "logits/rejected": -0.23082685470581055,
+ "logps/chosen": -205.08152770996094,
+ "logps/rejected": -217.8182830810547,
+ "loss": 0.5481,
+ "rewards/accuracies": 0.715624988079071,
+ "rewards/chosen": -0.3798620104789734,
+ "rewards/margins": 0.667800784111023,
+ "rewards/rejected": -1.0476627349853516,
+ "step": 1440
+ },
+ {
+ "epoch": 0.9971203850947694,
+ "grad_norm": 22.623803658983302,
+ "learning_rate": 4.583651642475172e-09,
+ "logits/chosen": -0.2716945707798004,
+ "logits/rejected": -0.24027717113494873,
+ "logps/chosen": -204.0220947265625,
+ "logps/rejected": -215.94126892089844,
+ "loss": 0.5363,
+ "rewards/accuracies": 0.7593749761581421,
+ "rewards/chosen": -0.4140065610408783,
+ "rewards/margins": 0.6608986258506775,
+ "rewards/rejected": -1.0749050378799438,
+ "step": 1450
+ },
+ {
+ "epoch": 1.0,
+ "step": 1455,
+ "total_flos": 160109785841664.0,
+ "train_loss": 0.5976956401903605,
+ "train_runtime": 14200.9584,
+ "train_samples_per_second": 6.554,
+ "train_steps_per_second": 0.102
+ }
+ ],
+ "logging_steps": 10,
+ "max_steps": 1455,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 160109785841664.0,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fed9008baf1493b0518247b719e6911a73a5a7de180055b9b171248354fd2d14
+ size 7544
training_loss.png ADDED
training_rewards_accuracies.png ADDED
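
The two plots added above can be regenerated from the logged history committed in this same diff. Below is a minimal sketch, assuming the JSON shown above is saved under the standard Hugging Face Trainer filename `trainer_state.json` (the filename is not visible in this hunk), that its top-level log array uses the Trainer's usual `log_history` key, and that `matplotlib` is installed; it is an illustration of reading the log, not the exact script that produced the committed images.

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state committed above. "log_history" holds one dict per
# logging event (every 10 optimizer steps here, per "logging_steps": 10).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries that carry DPO metrics; the final epoch-1.0
# summary entry has no "loss" key, so it is filtered out.
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
loss = [entry["loss"] for entry in logs]
accuracies = [entry["rewards/accuracies"] for entry in logs]

# Training-loss curve, analogous to training_loss.png.
plt.figure()
plt.plot(steps, loss)
plt.xlabel("step")
plt.ylabel("train loss")
plt.savefig("training_loss.png")

# Preference accuracy (fraction of pairs where the chosen response's implicit
# reward exceeds the rejected one's), analogous to training_rewards_accuracies.png.
plt.figure()
plt.plot(steps, accuracies)
plt.xlabel("step")
plt.ylabel("rewards/accuracies")
plt.savefig("training_rewards_accuracies.png")
```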