davidanugraha committed on
Commit 243745f · verified · 1 Parent(s): d1c24f6

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: meta-llama/Llama-3.2-3B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: helpsteer3_llama32_3b_dpo_rmr1_32b
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # helpsteer3_llama32_3b_dpo_rmr1_32b
+
+ This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_rmr1_32b dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-06
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.6.0
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
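
The auto-generated card stops short of a usage example, so here is a minimal sketch of loading this checkpoint with the Transformers version listed above. The repo id is an assumption inferred from the model name in the card, and the prompt is illustrative only.

```python
# Minimal sketch: load the DPO-tuned checkpoint and run one chat turn.
# ASSUMPTION: the repo id below is inferred from the model name in the card.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b"  # hypothetical repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "In one paragraph, what does DPO fine-tuning change?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# generation_config.json in this commit ships do_sample=True, temperature=0.6,
# top_p=0.9; generate() applies those defaults unless overridden per call.
output_ids = model.generate(input_ids, max_new_tokens=256)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```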
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 1.0,
+     "total_flos": 160003043033088.0,
+     "train_loss": 0.538088473287122,
+     "train_runtime": 14420.7196,
+     "train_samples_per_second": 6.434,
+     "train_steps_per_second": 0.101
+ }
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+     {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+     {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+     {%- if strftime_now is defined %}
+         {%- set date_string = strftime_now("%d %b %Y") %}
+     {%- else %}
+         {%- set date_string = "26 Jul 2024" %}
+     {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+     {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+     {%- set system_message = messages[0]['content']|trim %}
+     {%- set messages = messages[1:] %}
+ {%- else %}
+     {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+     {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+     {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+     {#- Extract the first user message so we can plug it in here #}
+     {%- if messages | length != 0 %}
+         {%- set first_user_message = messages[0]['content']|trim %}
+         {%- set messages = messages[1:] %}
+     {%- else %}
+         {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+     {%- endif %}
+     {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+     {{- "Given the following functions, please respond with a JSON for a function call " }}
+     {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+     {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+     {{- "Do not use variables.\n\n" }}
+     {%- for t in tools %}
+         {{- t | tojson(indent=4) }}
+         {{- "\n\n" }}
+     {%- endfor %}
+     {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+     {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+         {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+     {%- elif 'tool_calls' in message %}
+         {%- if not message.tool_calls|length == 1 %}
+             {{- raise_exception("This model only supports single tool-calls at once!") }}
+         {%- endif %}
+         {%- set tool_call = message.tool_calls[0].function %}
+         {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+         {{- '{"name": "' + tool_call.name + '", ' }}
+         {{- '"parameters": ' }}
+         {{- tool_call.arguments | tojson }}
+         {{- "}" }}
+         {{- "<|eot_id|>" }}
+     {%- elif message.role == "tool" or message.role == "ipython" %}
+         {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+         {%- if message.content is mapping or message.content is iterable %}
+             {{- message.content | tojson }}
+         {%- else %}
+             {{- message.content }}
+         {%- endif %}
+         {{- "<|eot_id|>" }}
+     {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+     {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
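
The template above is the standard Llama 3 chat format: a system header carrying the date and knowledge cutoff, optional tool definitions placed either in the system block or folded into the first user turn, single tool calls serialized as JSON in assistant turns, and tool output returned under an ipython header. A quick way to see exactly what it renders, assuming the same hypothetical repo id as above:

```python
# Sketch: render the template above to a string without running the model.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b"  # hypothetical repo id
)
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Per the template: <|begin_of_text|>, a system header with the cutoff/date lines,
# the user turn, then an open assistant header because add_generation_prompt=True.
```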
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+     "architectures": [
+         "LlamaForCausalLM"
+     ],
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "bos_token_id": 128000,
+     "eos_token_id": [
+         128001,
+         128008,
+         128009
+     ],
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 3072,
+     "initializer_range": 0.02,
+     "intermediate_size": 8192,
+     "max_position_embeddings": 131072,
+     "mlp_bias": false,
+     "model_type": "llama",
+     "num_attention_heads": 24,
+     "num_hidden_layers": 28,
+     "num_key_value_heads": 8,
+     "pretraining_tp": 1,
+     "rms_norm_eps": 1e-05,
+     "rope_scaling": {
+         "factor": 32.0,
+         "high_freq_factor": 4.0,
+         "low_freq_factor": 1.0,
+         "original_max_position_embeddings": 8192,
+         "rope_type": "llama3"
+     },
+     "rope_theta": 500000.0,
+     "tie_word_embeddings": true,
+     "torch_dtype": "bfloat16",
+     "transformers_version": "4.52.4",
+     "use_cache": false,
+     "vocab_size": 128256
+ }
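
This config matches the Llama 3.2 3B architecture: 28 layers, hidden size 3072, grouped-query attention with 24 query heads over 8 KV heads, and llama3-type RoPE scaling from the original 8192 positions up to a 131072-token window. A small sketch of sanity checks one can derive from these fields, under the same hypothetical repo id as above:

```python
# Sketch: consistency checks derived from the config above.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b")  # hypothetical repo id

# head_dim is stored explicitly and agrees with hidden_size / num_attention_heads.
assert cfg.head_dim == cfg.hidden_size // cfg.num_attention_heads  # 3072 // 24 == 128

# Grouped-query attention: 24 query heads share 8 KV heads, i.e. 3 queries per KV head.
assert cfg.num_attention_heads // cfg.num_key_value_heads == 3

# rope_type="llama3" frequency scaling bridges the pretraining context (8192)
# and the advertised max_position_embeddings (131072).
print(cfg.rope_scaling["original_max_position_embeddings"], cfg.max_position_embeddings)
```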
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+     "bos_token_id": 128000,
+     "do_sample": true,
+     "eos_token_id": [
+         128001,
+         128008,
+         128009
+     ],
+     "temperature": 0.6,
+     "top_p": 0.9,
+     "transformers_version": "4.52.4"
+ }
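
These sampling defaults travel with the checkpoint and are picked up by generate() automatically; any of the three eos ids (<|end_of_text|>, <|eom_id|>, <|eot_id|>) ends generation. A sketch inspecting them directly, under the same repo-id assumption as above:

```python
# Sketch: load and inspect the generation defaults shipped above.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(
    "davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b"  # hypothetical repo id
)
print(gen_cfg.do_sample, gen_cfg.temperature, gen_cfg.top_p)  # True 0.6 0.9
print(gen_cfg.eos_token_id)  # [128001, 128008, 128009]
```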
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0309e64337d8f8a49f67cfddec880339680eaf725d67b637ce9b075857d04f16
+ size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de9d25faf481e3e3edc7c01994753fd392765d6900ca4baeb68816a5c2728c8e
+ size 1459745184
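
Both shards are stored as Git LFS pointers: three text lines giving the spec version, the SHA-256 of the real payload, and its size in bytes (about 4.97 GB and 1.46 GB here). A sketch that verifies a downloaded shard against such a pointer; the local paths are assumptions:

```python
# Sketch: check a downloaded LFS object against the pointer text above.
import hashlib

def parse_lfs_pointer(path):
    """Parse the 'key value' lines of a Git LFS pointer file into (oid, size)."""
    fields = dict(
        line.split(" ", 1)
        for line in open(path, encoding="utf-8").read().splitlines()
        if line
    )
    return fields["oid"].removeprefix("sha256:"), int(fields["size"])

def sha256_of(path, chunk_size=1 << 20):
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

# ASSUMPTION: local copies of the pointer text and the resolved binary.
oid, size = parse_lfs_pointer("pointers/pytorch_model-00001-of-00002.bin")
assert sha256_of("pytorch_model-00001-of-00002.bin") == oid
```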
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+     "metadata": {
+         "total_size": 6425499648
+     },
+     "weight_map": {
+         "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+         "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+         "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+         "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+     }
+ }
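
The index maps each tensor name to one of the two shards; metadata.total_size (6,425,499,648 bytes, about 6.4 GB) is consistent with roughly 3.2B bfloat16 parameters. A sketch that inspects the shard layout, assuming a local copy of the index file:

```python
# Sketch: count tensors per shard and report the declared checkpoint size.
import json
from collections import Counter

with open("pytorch_model.bin.index.json", encoding="utf-8") as f:  # assumed local path
    index = json.load(f)

print(Counter(index["weight_map"].values()))  # tensors per shard file
print(f'{index["metadata"]["total_size"] / 1e9:.2f} GB declared')  # 6.43 GB
```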
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+     "additional_special_tokens": [
+         {
+             "content": "<|eom_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false
+         }
+     ],
+     "bos_token": {
+         "content": "<|begin_of_text|>",
+         "lstrip": false,
+         "normalized": false,
+         "rstrip": false,
+         "single_word": false
+     },
+     "eos_token": {
+         "content": "<|eot_id|>",
+         "lstrip": false,
+         "normalized": false,
+         "rstrip": false,
+         "single_word": false
+     },
+     "pad_token": "<|eot_id|>"
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
+ {
+     "added_tokens_decoder": {
+         "128000": {
+             "content": "<|begin_of_text|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128001": {
+             "content": "<|end_of_text|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128002": {
+             "content": "<|reserved_special_token_0|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128003": {
+             "content": "<|reserved_special_token_1|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128004": {
+             "content": "<|finetune_right_pad_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128005": {
+             "content": "<|reserved_special_token_2|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128006": {
+             "content": "<|start_header_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128007": {
+             "content": "<|end_header_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128008": {
+             "content": "<|eom_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128009": {
+             "content": "<|eot_id|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128010": {
+             "content": "<|python_tag|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128011": {
+             "content": "<|reserved_special_token_3|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128012": {
+             "content": "<|reserved_special_token_4|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128013": {
+             "content": "<|reserved_special_token_5|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128014": {
+             "content": "<|reserved_special_token_6|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128015": {
+             "content": "<|reserved_special_token_7|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128016": {
+             "content": "<|reserved_special_token_8|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128017": {
+             "content": "<|reserved_special_token_9|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128018": {
+             "content": "<|reserved_special_token_10|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128019": {
+             "content": "<|reserved_special_token_11|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128020": {
+             "content": "<|reserved_special_token_12|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128021": {
+             "content": "<|reserved_special_token_13|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128022": {
+             "content": "<|reserved_special_token_14|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128023": {
+             "content": "<|reserved_special_token_15|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128024": {
+             "content": "<|reserved_special_token_16|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128025": {
+             "content": "<|reserved_special_token_17|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128026": {
+             "content": "<|reserved_special_token_18|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128027": {
+             "content": "<|reserved_special_token_19|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128028": {
+             "content": "<|reserved_special_token_20|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128029": {
+             "content": "<|reserved_special_token_21|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128030": {
+             "content": "<|reserved_special_token_22|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128031": {
+             "content": "<|reserved_special_token_23|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128032": {
+             "content": "<|reserved_special_token_24|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128033": {
+             "content": "<|reserved_special_token_25|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128034": {
+             "content": "<|reserved_special_token_26|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128035": {
+             "content": "<|reserved_special_token_27|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128036": {
+             "content": "<|reserved_special_token_28|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128037": {
+             "content": "<|reserved_special_token_29|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128038": {
+             "content": "<|reserved_special_token_30|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128039": {
+             "content": "<|reserved_special_token_31|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128040": {
+             "content": "<|reserved_special_token_32|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128041": {
+             "content": "<|reserved_special_token_33|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128042": {
+             "content": "<|reserved_special_token_34|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128043": {
+             "content": "<|reserved_special_token_35|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128044": {
+             "content": "<|reserved_special_token_36|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128045": {
+             "content": "<|reserved_special_token_37|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128046": {
+             "content": "<|reserved_special_token_38|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128047": {
+             "content": "<|reserved_special_token_39|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128048": {
+             "content": "<|reserved_special_token_40|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128049": {
+             "content": "<|reserved_special_token_41|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128050": {
+             "content": "<|reserved_special_token_42|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128051": {
+             "content": "<|reserved_special_token_43|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128052": {
+             "content": "<|reserved_special_token_44|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128053": {
+             "content": "<|reserved_special_token_45|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128054": {
+             "content": "<|reserved_special_token_46|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128055": {
+             "content": "<|reserved_special_token_47|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128056": {
+             "content": "<|reserved_special_token_48|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128057": {
+             "content": "<|reserved_special_token_49|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128058": {
+             "content": "<|reserved_special_token_50|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128059": {
+             "content": "<|reserved_special_token_51|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128060": {
+             "content": "<|reserved_special_token_52|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128061": {
+             "content": "<|reserved_special_token_53|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128062": {
+             "content": "<|reserved_special_token_54|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128063": {
+             "content": "<|reserved_special_token_55|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128064": {
+             "content": "<|reserved_special_token_56|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128065": {
+             "content": "<|reserved_special_token_57|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128066": {
+             "content": "<|reserved_special_token_58|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128067": {
+             "content": "<|reserved_special_token_59|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128068": {
+             "content": "<|reserved_special_token_60|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128069": {
+             "content": "<|reserved_special_token_61|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128070": {
+             "content": "<|reserved_special_token_62|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128071": {
+             "content": "<|reserved_special_token_63|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128072": {
+             "content": "<|reserved_special_token_64|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128073": {
+             "content": "<|reserved_special_token_65|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128074": {
+             "content": "<|reserved_special_token_66|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128075": {
+             "content": "<|reserved_special_token_67|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128076": {
+             "content": "<|reserved_special_token_68|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128077": {
+             "content": "<|reserved_special_token_69|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128078": {
+             "content": "<|reserved_special_token_70|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128079": {
+             "content": "<|reserved_special_token_71|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128080": {
+             "content": "<|reserved_special_token_72|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128081": {
+             "content": "<|reserved_special_token_73|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128082": {
+             "content": "<|reserved_special_token_74|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128083": {
+             "content": "<|reserved_special_token_75|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128084": {
+             "content": "<|reserved_special_token_76|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128085": {
+             "content": "<|reserved_special_token_77|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128086": {
+             "content": "<|reserved_special_token_78|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128087": {
+             "content": "<|reserved_special_token_79|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128088": {
+             "content": "<|reserved_special_token_80|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128089": {
+             "content": "<|reserved_special_token_81|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128090": {
+             "content": "<|reserved_special_token_82|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128091": {
+             "content": "<|reserved_special_token_83|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128092": {
+             "content": "<|reserved_special_token_84|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128093": {
+             "content": "<|reserved_special_token_85|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128094": {
+             "content": "<|reserved_special_token_86|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128095": {
+             "content": "<|reserved_special_token_87|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128096": {
+             "content": "<|reserved_special_token_88|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128097": {
+             "content": "<|reserved_special_token_89|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128098": {
+             "content": "<|reserved_special_token_90|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128099": {
+             "content": "<|reserved_special_token_91|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128100": {
+             "content": "<|reserved_special_token_92|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128101": {
+             "content": "<|reserved_special_token_93|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128102": {
+             "content": "<|reserved_special_token_94|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128103": {
+             "content": "<|reserved_special_token_95|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128104": {
+             "content": "<|reserved_special_token_96|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128105": {
+             "content": "<|reserved_special_token_97|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128106": {
+             "content": "<|reserved_special_token_98|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128107": {
+             "content": "<|reserved_special_token_99|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128108": {
+             "content": "<|reserved_special_token_100|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128109": {
+             "content": "<|reserved_special_token_101|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128110": {
+             "content": "<|reserved_special_token_102|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128111": {
+             "content": "<|reserved_special_token_103|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128112": {
+             "content": "<|reserved_special_token_104|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128113": {
+             "content": "<|reserved_special_token_105|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128114": {
+             "content": "<|reserved_special_token_106|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128115": {
+             "content": "<|reserved_special_token_107|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128116": {
+             "content": "<|reserved_special_token_108|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128117": {
+             "content": "<|reserved_special_token_109|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128118": {
+             "content": "<|reserved_special_token_110|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128119": {
+             "content": "<|reserved_special_token_111|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128120": {
+             "content": "<|reserved_special_token_112|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128121": {
+             "content": "<|reserved_special_token_113|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128122": {
+             "content": "<|reserved_special_token_114|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128123": {
+             "content": "<|reserved_special_token_115|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
+             "single_word": false,
+             "special": true
+         },
+         "128124": {
+             "content": "<|reserved_special_token_116|>",
+             "lstrip": false,
+             "normalized": false,
+             "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
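
The settings above map both `eos_token` and `pad_token` to `<|eot_id|>` and leave the reserved placeholders (`<|reserved_special_token_81|>` through `<|reserved_special_token_247|>` in this excerpt) untouched. A minimal sketch of how these fields behave once the upload is loaded with `transformers`; the repo id below is an assumption about where this folder lands, not something stated in the files:

```python
# Minimal sketch: load the uploaded tokenizer and inspect the special-token
# settings from tokenizer_config.json above. The repo id is an assumption
# about this upload's final location.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b")

print(tok.bos_token)         # <|begin_of_text|>
print(tok.eos_token)         # <|eot_id|>
print(tok.pad_token)         # <|eot_id|> -- pad_token is aliased to the eos token
print(tok.model_max_length)  # 131072

# The reserved placeholders enumerated above round-trip as literal strings:
print(tok.convert_ids_to_tokens(128255))  # <|reserved_special_token_247|>
```

Because `pad_token` equals `eos_token` here, right-padded batches are fine for training, but generation code should generally pass an explicit `attention_mask` so padding is not confused with end-of-turn.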
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 160003043033088.0,
4
+ "train_loss": 0.538088473287122,
5
+ "train_runtime": 14420.7196,
6
+ "train_samples_per_second": 6.434,
7
+ "train_steps_per_second": 0.101
8
+ }
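
For context, the summary above is internally consistent with the hyperparameters in the README: at ~0.101 optimizer steps per second and an effective batch size of 64, throughput works out to the reported ~6.43 samples per second, or roughly 93k preference pairs over the single epoch. A short sketch of that check (standard library only):

```python
# Sketch: sanity-check the throughput figures in train_results.json.
import json

with open("train_results.json") as f:
    stats = json.load(f)

# runtime x samples/s ~= number of preference pairs seen in the one epoch
print(round(stats["train_runtime"] * stats["train_samples_per_second"]))  # ~92783

# steps/s x effective batch size (64 per the README) ~= samples/s
print(round(stats["train_steps_per_second"] * 64, 2))  # ~6.46 vs. reported 6.434
```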
trainer_log.jsonl ADDED
@@ -0,0 +1,146 @@
1
+ {"current_steps": 10, "total_steps": 1450, "loss": 0.694, "accuracy": 0.3765624761581421, "lr": 6.206896551724137e-08, "epoch": 0.006898038370338435, "percentage": 0.69, "elapsed_time": "0:01:41", "remaining_time": "4:03:23"}
2
+ {"current_steps": 20, "total_steps": 1450, "loss": 0.6924, "accuracy": 0.518750011920929, "lr": 1.310344827586207e-07, "epoch": 0.01379607674067687, "percentage": 1.38, "elapsed_time": "0:03:20", "remaining_time": "3:59:03"}
3
+ {"current_steps": 30, "total_steps": 1450, "loss": 0.6928, "accuracy": 0.4921875298023224, "lr": 2e-07, "epoch": 0.020694115111015304, "percentage": 2.07, "elapsed_time": "0:04:56", "remaining_time": "3:53:55"}
4
+ {"current_steps": 40, "total_steps": 1450, "loss": 0.6927, "accuracy": 0.5140625238418579, "lr": 2.689655172413793e-07, "epoch": 0.02759215348135374, "percentage": 2.76, "elapsed_time": "0:06:31", "remaining_time": "3:50:07"}
5
+ {"current_steps": 50, "total_steps": 1450, "loss": 0.6921, "accuracy": 0.535937488079071, "lr": 3.379310344827586e-07, "epoch": 0.03449019185169218, "percentage": 3.45, "elapsed_time": "0:08:10", "remaining_time": "3:48:50"}
6
+ {"current_steps": 60, "total_steps": 1450, "loss": 0.6928, "accuracy": 0.48125001788139343, "lr": 4.068965517241379e-07, "epoch": 0.04138823022203061, "percentage": 4.14, "elapsed_time": "0:09:50", "remaining_time": "3:47:59"}
7
+ {"current_steps": 70, "total_steps": 1450, "loss": 0.6914, "accuracy": 0.53125, "lr": 4.7586206896551725e-07, "epoch": 0.04828626859236904, "percentage": 4.83, "elapsed_time": "0:11:31", "remaining_time": "3:47:05"}
8
+ {"current_steps": 80, "total_steps": 1450, "loss": 0.6903, "accuracy": 0.5453125238418579, "lr": 5.448275862068966e-07, "epoch": 0.05518430696270748, "percentage": 5.52, "elapsed_time": "0:13:09", "remaining_time": "3:45:13"}
9
+ {"current_steps": 90, "total_steps": 1450, "loss": 0.6886, "accuracy": 0.574999988079071, "lr": 6.137931034482758e-07, "epoch": 0.062082345333045914, "percentage": 6.21, "elapsed_time": "0:14:47", "remaining_time": "3:43:31"}
10
+ {"current_steps": 100, "total_steps": 1450, "loss": 0.6884, "accuracy": 0.590624988079071, "lr": 6.827586206896552e-07, "epoch": 0.06898038370338436, "percentage": 6.9, "elapsed_time": "0:16:29", "remaining_time": "3:42:41"}
11
+ {"current_steps": 110, "total_steps": 1450, "loss": 0.6858, "accuracy": 0.582812488079071, "lr": 7.517241379310344e-07, "epoch": 0.07587842207372278, "percentage": 7.59, "elapsed_time": "0:18:09", "remaining_time": "3:41:06"}
12
+ {"current_steps": 120, "total_steps": 1450, "loss": 0.6836, "accuracy": 0.590624988079071, "lr": 8.206896551724138e-07, "epoch": 0.08277646044406121, "percentage": 8.28, "elapsed_time": "0:19:47", "remaining_time": "3:39:25"}
13
+ {"current_steps": 130, "total_steps": 1450, "loss": 0.683, "accuracy": 0.6000000238418579, "lr": 8.896551724137931e-07, "epoch": 0.08967449881439965, "percentage": 8.97, "elapsed_time": "0:21:26", "remaining_time": "3:37:47"}
14
+ {"current_steps": 140, "total_steps": 1450, "loss": 0.6762, "accuracy": 0.6265625357627869, "lr": 9.586206896551724e-07, "epoch": 0.09657253718473809, "percentage": 9.66, "elapsed_time": "0:23:02", "remaining_time": "3:35:33"}
15
+ {"current_steps": 150, "total_steps": 1450, "loss": 0.6703, "accuracy": 0.6578125357627869, "lr": 9.969348659003832e-07, "epoch": 0.10347057555507652, "percentage": 10.34, "elapsed_time": "0:24:38", "remaining_time": "3:33:34"}
16
+ {"current_steps": 160, "total_steps": 1450, "loss": 0.6615, "accuracy": 0.6421875357627869, "lr": 9.89272030651341e-07, "epoch": 0.11036861392541496, "percentage": 11.03, "elapsed_time": "0:26:16", "remaining_time": "3:31:53"}
17
+ {"current_steps": 170, "total_steps": 1450, "loss": 0.6544, "accuracy": 0.676562488079071, "lr": 9.816091954022988e-07, "epoch": 0.11726665229575339, "percentage": 11.72, "elapsed_time": "0:28:01", "remaining_time": "3:31:01"}
18
+ {"current_steps": 180, "total_steps": 1450, "loss": 0.6512, "accuracy": 0.65625, "lr": 9.739463601532568e-07, "epoch": 0.12416469066609183, "percentage": 12.41, "elapsed_time": "0:29:39", "remaining_time": "3:29:16"}
19
+ {"current_steps": 190, "total_steps": 1450, "loss": 0.655, "accuracy": 0.651562511920929, "lr": 9.662835249042146e-07, "epoch": 0.13106272903643026, "percentage": 13.1, "elapsed_time": "0:31:16", "remaining_time": "3:27:23"}
20
+ {"current_steps": 200, "total_steps": 1450, "loss": 0.6477, "accuracy": 0.6390625238418579, "lr": 9.586206896551724e-07, "epoch": 0.13796076740676871, "percentage": 13.79, "elapsed_time": "0:32:59", "remaining_time": "3:26:09"}
21
+ {"current_steps": 210, "total_steps": 1450, "loss": 0.6582, "accuracy": 0.6093750596046448, "lr": 9.509578544061302e-07, "epoch": 0.14485880577710714, "percentage": 14.48, "elapsed_time": "0:34:37", "remaining_time": "3:24:27"}
22
+ {"current_steps": 220, "total_steps": 1450, "loss": 0.6466, "accuracy": 0.6843750476837158, "lr": 9.43295019157088e-07, "epoch": 0.15175684414744556, "percentage": 15.17, "elapsed_time": "0:36:15", "remaining_time": "3:22:45"}
23
+ {"current_steps": 230, "total_steps": 1450, "loss": 0.6306, "accuracy": 0.651562511920929, "lr": 9.356321839080458e-07, "epoch": 0.158654882517784, "percentage": 15.86, "elapsed_time": "0:37:53", "remaining_time": "3:21:01"}
24
+ {"current_steps": 240, "total_steps": 1450, "loss": 0.6324, "accuracy": 0.65625, "lr": 9.279693486590037e-07, "epoch": 0.16555292088812243, "percentage": 16.55, "elapsed_time": "0:39:36", "remaining_time": "3:19:39"}
25
+ {"current_steps": 250, "total_steps": 1450, "loss": 0.6316, "accuracy": 0.6640625596046448, "lr": 9.203065134099616e-07, "epoch": 0.17245095925846088, "percentage": 17.24, "elapsed_time": "0:41:14", "remaining_time": "3:17:55"}
26
+ {"current_steps": 260, "total_steps": 1450, "loss": 0.6387, "accuracy": 0.6484375, "lr": 9.126436781609194e-07, "epoch": 0.1793489976287993, "percentage": 17.93, "elapsed_time": "0:43:22", "remaining_time": "3:18:31"}
27
+ {"current_steps": 270, "total_steps": 1450, "loss": 0.6228, "accuracy": 0.6796875, "lr": 9.049808429118773e-07, "epoch": 0.18624703599913775, "percentage": 18.62, "elapsed_time": "0:45:04", "remaining_time": "3:16:57"}
28
+ {"current_steps": 280, "total_steps": 1450, "loss": 0.6144, "accuracy": 0.6843750476837158, "lr": 8.973180076628351e-07, "epoch": 0.19314507436947617, "percentage": 19.31, "elapsed_time": "0:46:50", "remaining_time": "3:15:43"}
29
+ {"current_steps": 290, "total_steps": 1450, "loss": 0.6053, "accuracy": 0.6890625357627869, "lr": 8.896551724137931e-07, "epoch": 0.20004311273981462, "percentage": 20.0, "elapsed_time": "0:48:30", "remaining_time": "3:14:03"}
30
+ {"current_steps": 300, "total_steps": 1450, "loss": 0.6032, "accuracy": 0.6812500357627869, "lr": 8.81992337164751e-07, "epoch": 0.20694115111015304, "percentage": 20.69, "elapsed_time": "0:50:11", "remaining_time": "3:12:25"}
31
+ {"current_steps": 310, "total_steps": 1450, "loss": 0.6074, "accuracy": 0.6875, "lr": 8.743295019157088e-07, "epoch": 0.2138391894804915, "percentage": 21.38, "elapsed_time": "0:51:49", "remaining_time": "3:10:34"}
32
+ {"current_steps": 320, "total_steps": 1450, "loss": 0.6047, "accuracy": 0.7000000476837158, "lr": 8.666666666666667e-07, "epoch": 0.22073722785082991, "percentage": 22.07, "elapsed_time": "0:53:28", "remaining_time": "3:08:50"}
33
+ {"current_steps": 330, "total_steps": 1450, "loss": 0.6019, "accuracy": 0.6734375357627869, "lr": 8.590038314176245e-07, "epoch": 0.22763526622116836, "percentage": 22.76, "elapsed_time": "0:55:05", "remaining_time": "3:07:00"}
34
+ {"current_steps": 340, "total_steps": 1450, "loss": 0.6038, "accuracy": 0.6765625476837158, "lr": 8.513409961685824e-07, "epoch": 0.23453330459150679, "percentage": 23.45, "elapsed_time": "0:56:43", "remaining_time": "3:05:10"}
35
+ {"current_steps": 350, "total_steps": 1450, "loss": 0.5838, "accuracy": 0.7093750238418579, "lr": 8.436781609195402e-07, "epoch": 0.24143134296184524, "percentage": 24.14, "elapsed_time": "0:58:19", "remaining_time": "3:03:19"}
36
+ {"current_steps": 360, "total_steps": 1450, "loss": 0.5962, "accuracy": 0.682812511920929, "lr": 8.360153256704981e-07, "epoch": 0.24832938133218366, "percentage": 24.83, "elapsed_time": "0:59:57", "remaining_time": "3:01:33"}
37
+ {"current_steps": 370, "total_steps": 1450, "loss": 0.5912, "accuracy": 0.6859375238418579, "lr": 8.28352490421456e-07, "epoch": 0.2552274197025221, "percentage": 25.52, "elapsed_time": "1:01:36", "remaining_time": "2:59:49"}
38
+ {"current_steps": 380, "total_steps": 1450, "loss": 0.5811, "accuracy": 0.7000000476837158, "lr": 8.206896551724138e-07, "epoch": 0.26212545807286053, "percentage": 26.21, "elapsed_time": "1:03:12", "remaining_time": "2:57:59"}
39
+ {"current_steps": 390, "total_steps": 1450, "loss": 0.5902, "accuracy": 0.7000000476837158, "lr": 8.130268199233717e-07, "epoch": 0.26902349644319895, "percentage": 26.9, "elapsed_time": "1:04:53", "remaining_time": "2:56:23"}
40
+ {"current_steps": 400, "total_steps": 1450, "loss": 0.5922, "accuracy": 0.699999988079071, "lr": 8.053639846743294e-07, "epoch": 0.27592153481353743, "percentage": 27.59, "elapsed_time": "1:06:35", "remaining_time": "2:54:47"}
41
+ {"current_steps": 410, "total_steps": 1450, "loss": 0.5902, "accuracy": 0.6937500238418579, "lr": 7.977011494252873e-07, "epoch": 0.28281957318387585, "percentage": 28.28, "elapsed_time": "1:08:20", "remaining_time": "2:53:20"}
42
+ {"current_steps": 420, "total_steps": 1450, "loss": 0.5791, "accuracy": 0.6843750476837158, "lr": 7.900383141762452e-07, "epoch": 0.28971761155421427, "percentage": 28.97, "elapsed_time": "1:09:55", "remaining_time": "2:51:27"}
43
+ {"current_steps": 430, "total_steps": 1450, "loss": 0.5384, "accuracy": 0.7375000715255737, "lr": 7.82375478927203e-07, "epoch": 0.2966156499245527, "percentage": 29.66, "elapsed_time": "1:11:32", "remaining_time": "2:49:42"}
44
+ {"current_steps": 440, "total_steps": 1450, "loss": 0.5753, "accuracy": 0.7046875357627869, "lr": 7.747126436781609e-07, "epoch": 0.3035136882948911, "percentage": 30.34, "elapsed_time": "1:13:10", "remaining_time": "2:47:57"}
45
+ {"current_steps": 450, "total_steps": 1450, "loss": 0.5473, "accuracy": 0.7406250238418579, "lr": 7.670498084291187e-07, "epoch": 0.3104117266652296, "percentage": 31.03, "elapsed_time": "1:14:49", "remaining_time": "2:46:15"}
46
+ {"current_steps": 460, "total_steps": 1450, "loss": 0.5984, "accuracy": 0.7140625715255737, "lr": 7.593869731800766e-07, "epoch": 0.317309765035568, "percentage": 31.72, "elapsed_time": "1:16:22", "remaining_time": "2:44:22"}
47
+ {"current_steps": 470, "total_steps": 1450, "loss": 0.552, "accuracy": 0.7234375476837158, "lr": 7.517241379310344e-07, "epoch": 0.32420780340590644, "percentage": 32.41, "elapsed_time": "1:18:01", "remaining_time": "2:42:41"}
48
+ {"current_steps": 480, "total_steps": 1450, "loss": 0.5457, "accuracy": 0.7156250476837158, "lr": 7.440613026819923e-07, "epoch": 0.33110584177624486, "percentage": 33.1, "elapsed_time": "1:19:38", "remaining_time": "2:40:55"}
49
+ {"current_steps": 490, "total_steps": 1450, "loss": 0.5569, "accuracy": 0.7093750238418579, "lr": 7.363984674329502e-07, "epoch": 0.33800388014658334, "percentage": 33.79, "elapsed_time": "1:21:18", "remaining_time": "2:39:18"}
50
+ {"current_steps": 500, "total_steps": 1450, "loss": 0.5488, "accuracy": 0.7265625, "lr": 7.28735632183908e-07, "epoch": 0.34490191851692176, "percentage": 34.48, "elapsed_time": "1:23:01", "remaining_time": "2:37:44"}
51
+ {"current_steps": 510, "total_steps": 1450, "loss": 0.568, "accuracy": 0.6859375238418579, "lr": 7.210727969348659e-07, "epoch": 0.3517999568872602, "percentage": 35.17, "elapsed_time": "1:25:08", "remaining_time": "2:36:54"}
52
+ {"current_steps": 520, "total_steps": 1450, "loss": 0.5676, "accuracy": 0.723437488079071, "lr": 7.134099616858237e-07, "epoch": 0.3586979952575986, "percentage": 35.86, "elapsed_time": "1:26:50", "remaining_time": "2:35:18"}
53
+ {"current_steps": 530, "total_steps": 1450, "loss": 0.5301, "accuracy": 0.7234375476837158, "lr": 7.057471264367816e-07, "epoch": 0.3655960336279371, "percentage": 36.55, "elapsed_time": "1:28:29", "remaining_time": "2:33:36"}
54
+ {"current_steps": 540, "total_steps": 1450, "loss": 0.5549, "accuracy": 0.737500011920929, "lr": 6.980842911877395e-07, "epoch": 0.3724940719982755, "percentage": 37.24, "elapsed_time": "1:30:11", "remaining_time": "2:31:58"}
55
+ {"current_steps": 550, "total_steps": 1450, "loss": 0.5197, "accuracy": 0.734375, "lr": 6.904214559386973e-07, "epoch": 0.3793921103686139, "percentage": 37.93, "elapsed_time": "1:31:43", "remaining_time": "2:30:06"}
56
+ {"current_steps": 560, "total_steps": 1450, "loss": 0.5603, "accuracy": 0.7312500476837158, "lr": 6.827586206896552e-07, "epoch": 0.38629014873895234, "percentage": 38.62, "elapsed_time": "1:33:19", "remaining_time": "2:28:18"}
57
+ {"current_steps": 570, "total_steps": 1450, "loss": 0.5324, "accuracy": 0.7406250238418579, "lr": 6.750957854406129e-07, "epoch": 0.3931881871092908, "percentage": 39.31, "elapsed_time": "1:34:57", "remaining_time": "2:26:36"}
58
+ {"current_steps": 580, "total_steps": 1450, "loss": 0.5603, "accuracy": 0.7265625, "lr": 6.674329501915708e-07, "epoch": 0.40008622547962924, "percentage": 40.0, "elapsed_time": "1:36:35", "remaining_time": "2:24:53"}
59
+ {"current_steps": 590, "total_steps": 1450, "loss": 0.5281, "accuracy": 0.7437500357627869, "lr": 6.597701149425286e-07, "epoch": 0.40698426384996766, "percentage": 40.69, "elapsed_time": "1:38:10", "remaining_time": "2:23:06"}
60
+ {"current_steps": 600, "total_steps": 1450, "loss": 0.5235, "accuracy": 0.729687511920929, "lr": 6.521072796934865e-07, "epoch": 0.4138823022203061, "percentage": 41.38, "elapsed_time": "1:39:49", "remaining_time": "2:21:25"}
61
+ {"current_steps": 610, "total_steps": 1450, "loss": 0.5239, "accuracy": 0.7515624761581421, "lr": 6.444444444444444e-07, "epoch": 0.4207803405906445, "percentage": 42.07, "elapsed_time": "1:41:24", "remaining_time": "2:19:38"}
62
+ {"current_steps": 620, "total_steps": 1450, "loss": 0.5296, "accuracy": 0.753125011920929, "lr": 6.367816091954022e-07, "epoch": 0.427678378960983, "percentage": 42.76, "elapsed_time": "1:43:01", "remaining_time": "2:17:55"}
63
+ {"current_steps": 630, "total_steps": 1450, "loss": 0.5234, "accuracy": 0.7515624761581421, "lr": 6.291187739463601e-07, "epoch": 0.4345764173313214, "percentage": 43.45, "elapsed_time": "1:44:42", "remaining_time": "2:16:17"}
64
+ {"current_steps": 640, "total_steps": 1450, "loss": 0.5165, "accuracy": 0.7406250238418579, "lr": 6.214559386973179e-07, "epoch": 0.44147445570165983, "percentage": 44.14, "elapsed_time": "1:46:26", "remaining_time": "2:14:42"}
65
+ {"current_steps": 650, "total_steps": 1450, "loss": 0.5388, "accuracy": 0.7250000238418579, "lr": 6.137931034482758e-07, "epoch": 0.44837249407199825, "percentage": 44.83, "elapsed_time": "1:48:02", "remaining_time": "2:12:58"}
66
+ {"current_steps": 660, "total_steps": 1450, "loss": 0.5558, "accuracy": 0.745312511920929, "lr": 6.061302681992337e-07, "epoch": 0.45527053244233673, "percentage": 45.52, "elapsed_time": "1:49:41", "remaining_time": "2:11:17"}
67
+ {"current_steps": 670, "total_steps": 1450, "loss": 0.5195, "accuracy": 0.760937511920929, "lr": 5.984674329501915e-07, "epoch": 0.46216857081267515, "percentage": 46.21, "elapsed_time": "1:51:15", "remaining_time": "2:09:31"}
68
+ {"current_steps": 680, "total_steps": 1450, "loss": 0.5034, "accuracy": 0.7640625238418579, "lr": 5.908045977011494e-07, "epoch": 0.46906660918301357, "percentage": 46.9, "elapsed_time": "1:52:49", "remaining_time": "2:07:45"}
69
+ {"current_steps": 690, "total_steps": 1450, "loss": 0.5545, "accuracy": 0.71875, "lr": 5.831417624521072e-07, "epoch": 0.475964647553352, "percentage": 47.59, "elapsed_time": "1:54:30", "remaining_time": "2:06:07"}
70
+ {"current_steps": 700, "total_steps": 1450, "loss": 0.5363, "accuracy": 0.739062488079071, "lr": 5.754789272030651e-07, "epoch": 0.48286268592369047, "percentage": 48.28, "elapsed_time": "1:56:06", "remaining_time": "2:04:24"}
71
+ {"current_steps": 710, "total_steps": 1450, "loss": 0.5603, "accuracy": 0.6921875476837158, "lr": 5.678160919540229e-07, "epoch": 0.4897607242940289, "percentage": 48.97, "elapsed_time": "1:57:42", "remaining_time": "2:02:40"}
72
+ {"current_steps": 720, "total_steps": 1450, "loss": 0.5005, "accuracy": 0.768750011920929, "lr": 5.601532567049809e-07, "epoch": 0.4966587626643673, "percentage": 49.66, "elapsed_time": "1:59:16", "remaining_time": "2:00:55"}
73
+ {"current_steps": 730, "total_steps": 1450, "loss": 0.5219, "accuracy": 0.745312511920929, "lr": 5.524904214559388e-07, "epoch": 0.5035568010347058, "percentage": 50.34, "elapsed_time": "2:00:55", "remaining_time": "1:59:16"}
74
+ {"current_steps": 740, "total_steps": 1450, "loss": 0.4984, "accuracy": 0.746874988079071, "lr": 5.448275862068966e-07, "epoch": 0.5104548394050442, "percentage": 51.03, "elapsed_time": "2:02:30", "remaining_time": "1:57:32"}
75
+ {"current_steps": 750, "total_steps": 1450, "loss": 0.4902, "accuracy": 0.7843749523162842, "lr": 5.371647509578544e-07, "epoch": 0.5173528777753826, "percentage": 51.72, "elapsed_time": "2:04:09", "remaining_time": "1:55:53"}
76
+ {"current_steps": 760, "total_steps": 1450, "loss": 0.511, "accuracy": 0.762499988079071, "lr": 5.295019157088122e-07, "epoch": 0.5242509161457211, "percentage": 52.41, "elapsed_time": "2:06:15", "remaining_time": "1:54:37"}
77
+ {"current_steps": 770, "total_steps": 1450, "loss": 0.4868, "accuracy": 0.7749999761581421, "lr": 5.218390804597701e-07, "epoch": 0.5311489545160595, "percentage": 53.1, "elapsed_time": "2:07:55", "remaining_time": "1:52:58"}
78
+ {"current_steps": 780, "total_steps": 1450, "loss": 0.5033, "accuracy": 0.745312511920929, "lr": 5.14176245210728e-07, "epoch": 0.5380469928863979, "percentage": 53.79, "elapsed_time": "2:09:34", "remaining_time": "1:51:17"}
79
+ {"current_steps": 790, "total_steps": 1450, "loss": 0.4779, "accuracy": 0.768750011920929, "lr": 5.065134099616858e-07, "epoch": 0.5449450312567363, "percentage": 54.48, "elapsed_time": "2:11:08", "remaining_time": "1:49:33"}
80
+ {"current_steps": 800, "total_steps": 1450, "loss": 0.5141, "accuracy": 0.746874988079071, "lr": 4.988505747126436e-07, "epoch": 0.5518430696270749, "percentage": 55.17, "elapsed_time": "2:12:42", "remaining_time": "1:47:49"}
81
+ {"current_steps": 810, "total_steps": 1450, "loss": 0.5401, "accuracy": 0.7671875357627869, "lr": 4.911877394636015e-07, "epoch": 0.5587411079974133, "percentage": 55.86, "elapsed_time": "2:14:23", "remaining_time": "1:46:11"}
82
+ {"current_steps": 820, "total_steps": 1450, "loss": 0.4873, "accuracy": 0.78125, "lr": 4.835249042145594e-07, "epoch": 0.5656391463677517, "percentage": 56.55, "elapsed_time": "2:15:59", "remaining_time": "1:44:28"}
83
+ {"current_steps": 830, "total_steps": 1450, "loss": 0.503, "accuracy": 0.7593750357627869, "lr": 4.7586206896551725e-07, "epoch": 0.5725371847380901, "percentage": 57.24, "elapsed_time": "2:17:40", "remaining_time": "1:42:50"}
84
+ {"current_steps": 840, "total_steps": 1450, "loss": 0.5211, "accuracy": 0.7500000596046448, "lr": 4.681992337164751e-07, "epoch": 0.5794352231084285, "percentage": 57.93, "elapsed_time": "2:19:20", "remaining_time": "1:41:11"}
85
+ {"current_steps": 850, "total_steps": 1450, "loss": 0.481, "accuracy": 0.7749999761581421, "lr": 4.6053639846743294e-07, "epoch": 0.586333261478767, "percentage": 58.62, "elapsed_time": "2:20:59", "remaining_time": "1:39:31"}
86
+ {"current_steps": 860, "total_steps": 1450, "loss": 0.4794, "accuracy": 0.778124988079071, "lr": 4.528735632183908e-07, "epoch": 0.5932312998491054, "percentage": 59.31, "elapsed_time": "2:22:31", "remaining_time": "1:37:46"}
87
+ {"current_steps": 870, "total_steps": 1450, "loss": 0.4731, "accuracy": 0.776562511920929, "lr": 4.452107279693487e-07, "epoch": 0.6001293382194438, "percentage": 60.0, "elapsed_time": "2:24:13", "remaining_time": "1:36:09"}
88
+ {"current_steps": 880, "total_steps": 1450, "loss": 0.5277, "accuracy": 0.7562499642372131, "lr": 4.375478927203065e-07, "epoch": 0.6070273765897822, "percentage": 60.69, "elapsed_time": "2:25:52", "remaining_time": "1:34:29"}
89
+ {"current_steps": 890, "total_steps": 1450, "loss": 0.4904, "accuracy": 0.78125, "lr": 4.2988505747126437e-07, "epoch": 0.6139254149601208, "percentage": 61.38, "elapsed_time": "2:27:29", "remaining_time": "1:32:48"}
90
+ {"current_steps": 900, "total_steps": 1450, "loss": 0.4955, "accuracy": 0.765625, "lr": 4.222222222222222e-07, "epoch": 0.6208234533304592, "percentage": 62.07, "elapsed_time": "2:29:11", "remaining_time": "1:31:10"}
91
+ {"current_steps": 910, "total_steps": 1450, "loss": 0.4906, "accuracy": 0.7687499523162842, "lr": 4.1455938697318005e-07, "epoch": 0.6277214917007976, "percentage": 62.76, "elapsed_time": "2:30:47", "remaining_time": "1:29:28"}
92
+ {"current_steps": 920, "total_steps": 1450, "loss": 0.4652, "accuracy": 0.8046875, "lr": 4.068965517241379e-07, "epoch": 0.634619530071136, "percentage": 63.45, "elapsed_time": "2:32:26", "remaining_time": "1:27:49"}
93
+ {"current_steps": 930, "total_steps": 1450, "loss": 0.4609, "accuracy": 0.792187511920929, "lr": 3.992337164750958e-07, "epoch": 0.6415175684414745, "percentage": 64.14, "elapsed_time": "2:34:03", "remaining_time": "1:26:08"}
94
+ {"current_steps": 940, "total_steps": 1450, "loss": 0.4956, "accuracy": 0.7671875357627869, "lr": 3.9157088122605364e-07, "epoch": 0.6484156068118129, "percentage": 64.83, "elapsed_time": "2:35:43", "remaining_time": "1:24:29"}
95
+ {"current_steps": 950, "total_steps": 1450, "loss": 0.4729, "accuracy": 0.776562511920929, "lr": 3.839080459770115e-07, "epoch": 0.6553136451821513, "percentage": 65.52, "elapsed_time": "2:37:20", "remaining_time": "1:22:48"}
96
+ {"current_steps": 960, "total_steps": 1450, "loss": 0.4797, "accuracy": 0.7734375, "lr": 3.762452107279693e-07, "epoch": 0.6622116835524897, "percentage": 66.21, "elapsed_time": "2:38:57", "remaining_time": "1:21:07"}
97
+ {"current_steps": 970, "total_steps": 1450, "loss": 0.4687, "accuracy": 0.7875000238418579, "lr": 3.6858237547892717e-07, "epoch": 0.6691097219228282, "percentage": 66.9, "elapsed_time": "2:40:33", "remaining_time": "1:19:27"}
98
+ {"current_steps": 980, "total_steps": 1450, "loss": 0.4689, "accuracy": 0.7890625596046448, "lr": 3.60919540229885e-07, "epoch": 0.6760077602931667, "percentage": 67.59, "elapsed_time": "2:42:10", "remaining_time": "1:17:46"}
99
+ {"current_steps": 990, "total_steps": 1450, "loss": 0.4946, "accuracy": 0.776562511920929, "lr": 3.532567049808429e-07, "epoch": 0.6829057986635051, "percentage": 68.28, "elapsed_time": "2:43:50", "remaining_time": "1:16:07"}
100
+ {"current_steps": 1000, "total_steps": 1450, "loss": 0.4856, "accuracy": 0.7718750238418579, "lr": 3.4559386973180075e-07, "epoch": 0.6898038370338435, "percentage": 68.97, "elapsed_time": "2:45:25", "remaining_time": "1:14:26"}
101
+ {"current_steps": 1010, "total_steps": 1450, "loss": 0.4788, "accuracy": 0.785937488079071, "lr": 3.379310344827586e-07, "epoch": 0.6967018754041819, "percentage": 69.66, "elapsed_time": "2:47:35", "remaining_time": "1:13:00"}
102
+ {"current_steps": 1020, "total_steps": 1450, "loss": 0.4723, "accuracy": 0.7656250596046448, "lr": 3.3026819923371644e-07, "epoch": 0.7035999137745204, "percentage": 70.34, "elapsed_time": "2:49:17", "remaining_time": "1:11:21"}
103
+ {"current_steps": 1030, "total_steps": 1450, "loss": 0.4899, "accuracy": 0.784375011920929, "lr": 3.226053639846743e-07, "epoch": 0.7104979521448588, "percentage": 71.03, "elapsed_time": "2:50:49", "remaining_time": "1:09:39"}
104
+ {"current_steps": 1040, "total_steps": 1450, "loss": 0.4572, "accuracy": 0.7859375476837158, "lr": 3.149425287356321e-07, "epoch": 0.7173959905151972, "percentage": 71.72, "elapsed_time": "2:52:27", "remaining_time": "1:07:59"}
105
+ {"current_steps": 1050, "total_steps": 1450, "loss": 0.476, "accuracy": 0.7953125238418579, "lr": 3.0727969348659e-07, "epoch": 0.7242940288855356, "percentage": 72.41, "elapsed_time": "2:54:08", "remaining_time": "1:06:20"}
106
+ {"current_steps": 1060, "total_steps": 1450, "loss": 0.4797, "accuracy": 0.7859375476837158, "lr": 2.996168582375479e-07, "epoch": 0.7311920672558742, "percentage": 73.1, "elapsed_time": "2:55:53", "remaining_time": "1:04:42"}
107
+ {"current_steps": 1070, "total_steps": 1450, "loss": 0.4584, "accuracy": 0.8046875, "lr": 2.9195402298850576e-07, "epoch": 0.7380901056262126, "percentage": 73.79, "elapsed_time": "2:57:30", "remaining_time": "1:03:02"}
108
+ {"current_steps": 1080, "total_steps": 1450, "loss": 0.4752, "accuracy": 0.7843750715255737, "lr": 2.842911877394636e-07, "epoch": 0.744988143996551, "percentage": 74.48, "elapsed_time": "2:59:10", "remaining_time": "1:01:23"}
+ {"current_steps": 1090, "total_steps": 1450, "loss": 0.4815, "accuracy": 0.768750011920929, "lr": 2.7662835249042145e-07, "epoch": 0.7518861823668894, "percentage": 75.17, "elapsed_time": "3:00:46", "remaining_time": "0:59:42"}
+ {"current_steps": 1100, "total_steps": 1450, "loss": 0.488, "accuracy": 0.7750000357627869, "lr": 2.689655172413793e-07, "epoch": 0.7587842207372278, "percentage": 75.86, "elapsed_time": "3:02:25", "remaining_time": "0:58:02"}
+ {"current_steps": 1110, "total_steps": 1450, "loss": 0.4684, "accuracy": 0.8015625476837158, "lr": 2.6130268199233714e-07, "epoch": 0.7656822591075663, "percentage": 76.55, "elapsed_time": "3:04:00", "remaining_time": "0:56:21"}
+ {"current_steps": 1120, "total_steps": 1450, "loss": 0.4579, "accuracy": 0.784375011920929, "lr": 2.5363984674329503e-07, "epoch": 0.7725802974779047, "percentage": 77.24, "elapsed_time": "3:05:37", "remaining_time": "0:54:41"}
+ {"current_steps": 1130, "total_steps": 1450, "loss": 0.4743, "accuracy": 0.768750011920929, "lr": 2.459770114942529e-07, "epoch": 0.7794783358482431, "percentage": 77.93, "elapsed_time": "3:07:13", "remaining_time": "0:53:01"}
+ {"current_steps": 1140, "total_steps": 1450, "loss": 0.4685, "accuracy": 0.7750000357627869, "lr": 2.3831417624521072e-07, "epoch": 0.7863763742185816, "percentage": 78.62, "elapsed_time": "3:08:50", "remaining_time": "0:51:21"}
+ {"current_steps": 1150, "total_steps": 1450, "loss": 0.4579, "accuracy": 0.7984375357627869, "lr": 2.3065134099616856e-07, "epoch": 0.7932744125889201, "percentage": 79.31, "elapsed_time": "3:10:24", "remaining_time": "0:49:40"}
+ {"current_steps": 1160, "total_steps": 1450, "loss": 0.4805, "accuracy": 0.7578125, "lr": 2.2298850574712643e-07, "epoch": 0.8001724509592585, "percentage": 80.0, "elapsed_time": "3:12:00", "remaining_time": "0:48:00"}
+ {"current_steps": 1170, "total_steps": 1450, "loss": 0.4705, "accuracy": 0.778124988079071, "lr": 2.1532567049808428e-07, "epoch": 0.8070704893295969, "percentage": 80.69, "elapsed_time": "3:13:36", "remaining_time": "0:46:20"}
+ {"current_steps": 1180, "total_steps": 1450, "loss": 0.4357, "accuracy": 0.7984375357627869, "lr": 2.0766283524904212e-07, "epoch": 0.8139685276999353, "percentage": 81.38, "elapsed_time": "3:15:14", "remaining_time": "0:44:40"}
+ {"current_steps": 1190, "total_steps": 1450, "loss": 0.4544, "accuracy": 0.8109375238418579, "lr": 2e-07, "epoch": 0.8208665660702738, "percentage": 82.07, "elapsed_time": "3:16:52", "remaining_time": "0:43:00"}
+ {"current_steps": 1200, "total_steps": 1450, "loss": 0.4769, "accuracy": 0.7718750238418579, "lr": 1.9233716475095783e-07, "epoch": 0.8277646044406122, "percentage": 82.76, "elapsed_time": "3:18:26", "remaining_time": "0:41:20"}
+ {"current_steps": 1210, "total_steps": 1450, "loss": 0.4646, "accuracy": 0.7437500357627869, "lr": 1.846743295019157e-07, "epoch": 0.8346626428109506, "percentage": 83.45, "elapsed_time": "3:20:07", "remaining_time": "0:39:41"}
+ {"current_steps": 1220, "total_steps": 1450, "loss": 0.4633, "accuracy": 0.7984375357627869, "lr": 1.7701149425287357e-07, "epoch": 0.841560681181289, "percentage": 84.14, "elapsed_time": "3:21:47", "remaining_time": "0:38:02"}
+ {"current_steps": 1230, "total_steps": 1450, "loss": 0.4738, "accuracy": 0.7687499523162842, "lr": 1.6934865900383142e-07, "epoch": 0.8484587195516275, "percentage": 84.83, "elapsed_time": "3:23:28", "remaining_time": "0:36:23"}
+ {"current_steps": 1240, "total_steps": 1450, "loss": 0.4701, "accuracy": 0.7750000357627869, "lr": 1.6168582375478926e-07, "epoch": 0.855356757921966, "percentage": 85.52, "elapsed_time": "3:25:06", "remaining_time": "0:34:44"}
+ {"current_steps": 1250, "total_steps": 1450, "loss": 0.4404, "accuracy": 0.7859375476837158, "lr": 1.5402298850574713e-07, "epoch": 0.8622547962923044, "percentage": 86.21, "elapsed_time": "3:26:47", "remaining_time": "0:33:05"}
+ {"current_steps": 1260, "total_steps": 1450, "loss": 0.4612, "accuracy": 0.768750011920929, "lr": 1.4636015325670498e-07, "epoch": 0.8691528346626428, "percentage": 86.9, "elapsed_time": "3:28:51", "remaining_time": "0:31:29"}
+ {"current_steps": 1270, "total_steps": 1450, "loss": 0.4529, "accuracy": 0.796875, "lr": 1.3869731800766282e-07, "epoch": 0.8760508730329812, "percentage": 87.59, "elapsed_time": "3:30:27", "remaining_time": "0:29:49"}
+ {"current_steps": 1280, "total_steps": 1450, "loss": 0.4947, "accuracy": 0.7828125357627869, "lr": 1.310344827586207e-07, "epoch": 0.8829489114033197, "percentage": 88.28, "elapsed_time": "3:32:07", "remaining_time": "0:28:10"}
+ {"current_steps": 1290, "total_steps": 1450, "loss": 0.4644, "accuracy": 0.796875, "lr": 1.2337164750957853e-07, "epoch": 0.8898469497736581, "percentage": 88.97, "elapsed_time": "3:33:48", "remaining_time": "0:26:31"}
+ {"current_steps": 1300, "total_steps": 1450, "loss": 0.4417, "accuracy": 0.792187511920929, "lr": 1.1570881226053639e-07, "epoch": 0.8967449881439965, "percentage": 89.66, "elapsed_time": "3:35:24", "remaining_time": "0:24:51"}
+ {"current_steps": 1310, "total_steps": 1450, "loss": 0.4529, "accuracy": 0.7718750238418579, "lr": 1.0804597701149425e-07, "epoch": 0.903643026514335, "percentage": 90.34, "elapsed_time": "3:36:57", "remaining_time": "0:23:11"}
+ {"current_steps": 1320, "total_steps": 1450, "loss": 0.4465, "accuracy": 0.7906249761581421, "lr": 1.003831417624521e-07, "epoch": 0.9105410648846735, "percentage": 91.03, "elapsed_time": "3:38:37", "remaining_time": "0:21:31"}
+ {"current_steps": 1330, "total_steps": 1450, "loss": 0.5007, "accuracy": 0.7593750357627869, "lr": 9.272030651340995e-08, "epoch": 0.9174391032550119, "percentage": 91.72, "elapsed_time": "3:40:13", "remaining_time": "0:19:52"}
+ {"current_steps": 1340, "total_steps": 1450, "loss": 0.4432, "accuracy": 0.7828124761581421, "lr": 8.505747126436782e-08, "epoch": 0.9243371416253503, "percentage": 92.41, "elapsed_time": "3:41:51", "remaining_time": "0:18:12"}
+ {"current_steps": 1350, "total_steps": 1450, "loss": 0.496, "accuracy": 0.78125, "lr": 7.739463601532567e-08, "epoch": 0.9312351799956887, "percentage": 93.1, "elapsed_time": "3:43:31", "remaining_time": "0:16:33"}
+ {"current_steps": 1360, "total_steps": 1450, "loss": 0.4914, "accuracy": 0.7593749761581421, "lr": 6.973180076628352e-08, "epoch": 0.9381332183660271, "percentage": 93.79, "elapsed_time": "3:45:18", "remaining_time": "0:14:54"}
+ {"current_steps": 1370, "total_steps": 1450, "loss": 0.4409, "accuracy": 0.815625011920929, "lr": 6.206896551724137e-08, "epoch": 0.9450312567363656, "percentage": 94.48, "elapsed_time": "3:46:56", "remaining_time": "0:13:15"}
+ {"current_steps": 1380, "total_steps": 1450, "loss": 0.4695, "accuracy": 0.7796875238418579, "lr": 5.440613026819923e-08, "epoch": 0.951929295106704, "percentage": 95.17, "elapsed_time": "3:48:35", "remaining_time": "0:11:35"}
+ {"current_steps": 1390, "total_steps": 1450, "loss": 0.4592, "accuracy": 0.7999999523162842, "lr": 4.674329501915709e-08, "epoch": 0.9588273334770425, "percentage": 95.86, "elapsed_time": "3:50:14", "remaining_time": "0:09:56"}
+ {"current_steps": 1400, "total_steps": 1450, "loss": 0.4819, "accuracy": 0.7718749642372131, "lr": 3.908045977011494e-08, "epoch": 0.9657253718473809, "percentage": 96.55, "elapsed_time": "3:51:52", "remaining_time": "0:08:16"}
+ {"current_steps": 1410, "total_steps": 1450, "loss": 0.4469, "accuracy": 0.8203125, "lr": 3.1417624521072795e-08, "epoch": 0.9726234102177194, "percentage": 97.24, "elapsed_time": "3:53:26", "remaining_time": "0:06:37"}
+ {"current_steps": 1420, "total_steps": 1450, "loss": 0.4339, "accuracy": 0.7921875715255737, "lr": 2.375478927203065e-08, "epoch": 0.9795214485880578, "percentage": 97.93, "elapsed_time": "3:55:01", "remaining_time": "0:04:57"}
+ {"current_steps": 1430, "total_steps": 1450, "loss": 0.4703, "accuracy": 0.7953125238418579, "lr": 1.6091954022988505e-08, "epoch": 0.9864194869583962, "percentage": 98.62, "elapsed_time": "3:56:41", "remaining_time": "0:03:18"}
+ {"current_steps": 1440, "total_steps": 1450, "loss": 0.4753, "accuracy": 0.7593750357627869, "lr": 8.429118773946359e-09, "epoch": 0.9933175253287346, "percentage": 99.31, "elapsed_time": "3:58:19", "remaining_time": "0:01:39"}
+ {"current_steps": 1450, "total_steps": 1450, "loss": 0.4402, "accuracy": 0.7774193286895752, "lr": 7.662835249042145e-10, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "3:59:53", "remaining_time": "0:00:00"}
+ {"current_steps": 1450, "total_steps": 1450, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "4:00:20", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2218 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 1.0,
6
+ "eval_steps": 500,
7
+ "global_step": 1450,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.006898038370338435,
14
+ "grad_norm": 19.705646458861423,
15
+ "learning_rate": 6.206896551724137e-08,
16
+ "logits/chosen": 0.36835142970085144,
17
+ "logits/rejected": 0.37963682413101196,
18
+ "logps/chosen": -187.7964324951172,
19
+ "logps/rejected": -188.8426971435547,
20
+ "loss": 0.694,
21
+ "rewards/accuracies": 0.3765624761581421,
22
+ "rewards/chosen": 0.00014640908921137452,
23
+ "rewards/margins": -0.0013930138666182756,
24
+ "rewards/rejected": 0.001539423130452633,
25
+ "step": 10
26
+ },
27
+ {
28
+ "epoch": 0.01379607674067687,
29
+ "grad_norm": 20.359376341203163,
30
+ "learning_rate": 1.310344827586207e-07,
31
+ "logits/chosen": 0.346253365278244,
32
+ "logits/rejected": 0.39457663893699646,
33
+ "logps/chosen": -198.4833526611328,
34
+ "logps/rejected": -201.30184936523438,
35
+ "loss": 0.6924,
36
+ "rewards/accuracies": 0.518750011920929,
37
+ "rewards/chosen": 0.0007789735682308674,
38
+ "rewards/margins": 0.002007370116189122,
39
+ "rewards/rejected": -0.001228396431542933,
40
+ "step": 20
41
+ },
42
+ {
43
+ "epoch": 0.020694115111015304,
44
+ "grad_norm": 19.26655335814601,
45
+ "learning_rate": 2e-07,
46
+ "logits/chosen": 0.3383290767669678,
47
+ "logits/rejected": 0.3465532064437866,
48
+ "logps/chosen": -188.64797973632812,
49
+ "logps/rejected": -191.93255615234375,
50
+ "loss": 0.6928,
51
+ "rewards/accuracies": 0.4921875298023224,
52
+ "rewards/chosen": 0.00016563231474719942,
53
+ "rewards/margins": 0.001079646055586636,
54
+ "rewards/rejected": -0.0009140136535279453,
55
+ "step": 30
56
+ },
57
+ {
58
+ "epoch": 0.02759215348135374,
59
+ "grad_norm": 19.461533122560674,
60
+ "learning_rate": 2.689655172413793e-07,
61
+ "logits/chosen": 0.3220195174217224,
62
+ "logits/rejected": 0.352189302444458,
63
+ "logps/chosen": -193.79917907714844,
64
+ "logps/rejected": -195.8463592529297,
65
+ "loss": 0.6927,
66
+ "rewards/accuracies": 0.5140625238418579,
67
+ "rewards/chosen": 0.0018630579579621553,
68
+ "rewards/margins": 0.0012647934490814805,
69
+ "rewards/rejected": 0.0005982647417113185,
70
+ "step": 40
71
+ },
72
+ {
73
+ "epoch": 0.03449019185169218,
74
+ "grad_norm": 18.709946510342135,
75
+ "learning_rate": 3.379310344827586e-07,
76
+ "logits/chosen": 0.3111805021762848,
77
+ "logits/rejected": 0.3789227604866028,
78
+ "logps/chosen": -193.026611328125,
79
+ "logps/rejected": -194.10079956054688,
80
+ "loss": 0.6921,
81
+ "rewards/accuracies": 0.535937488079071,
82
+ "rewards/chosen": -4.348141374066472e-06,
83
+ "rewards/margins": 0.002440733602270484,
84
+ "rewards/rejected": -0.0024450814817100763,
85
+ "step": 50
86
+ },
87
+ {
88
+ "epoch": 0.04138823022203061,
89
+ "grad_norm": 20.819341516167324,
90
+ "learning_rate": 4.068965517241379e-07,
91
+ "logits/chosen": 0.2869677245616913,
92
+ "logits/rejected": 0.34675154089927673,
93
+ "logps/chosen": -193.4478759765625,
94
+ "logps/rejected": -193.21920776367188,
95
+ "loss": 0.6928,
96
+ "rewards/accuracies": 0.48125001788139343,
97
+ "rewards/chosen": 0.002217940054833889,
98
+ "rewards/margins": 0.0011762925423681736,
99
+ "rewards/rejected": 0.001041647745296359,
100
+ "step": 60
101
+ },
102
+ {
103
+ "epoch": 0.04828626859236904,
104
+ "grad_norm": 20.28793702047738,
105
+ "learning_rate": 4.7586206896551725e-07,
106
+ "logits/chosen": 0.34730735421180725,
107
+ "logits/rejected": 0.3767208456993103,
108
+ "logps/chosen": -194.2237091064453,
109
+ "logps/rejected": -196.96975708007812,
110
+ "loss": 0.6914,
111
+ "rewards/accuracies": 0.53125,
112
+ "rewards/chosen": 0.0017953569767996669,
113
+ "rewards/margins": 0.0039480566047132015,
114
+ "rewards/rejected": -0.002152699278667569,
115
+ "step": 70
116
+ },
117
+ {
118
+ "epoch": 0.05518430696270748,
119
+ "grad_norm": 22.20708731778355,
120
+ "learning_rate": 5.448275862068966e-07,
121
+ "logits/chosen": 0.3007983863353729,
122
+ "logits/rejected": 0.34858226776123047,
123
+ "logps/chosen": -196.1852569580078,
124
+ "logps/rejected": -200.59390258789062,
125
+ "loss": 0.6903,
126
+ "rewards/accuracies": 0.5453125238418579,
127
+ "rewards/chosen": 0.0038398741744458675,
128
+ "rewards/margins": 0.006094089709222317,
129
+ "rewards/rejected": -0.0022542153019458055,
130
+ "step": 80
131
+ },
132
+ {
133
+ "epoch": 0.062082345333045914,
134
+ "grad_norm": 21.294776767220753,
135
+ "learning_rate": 6.137931034482758e-07,
136
+ "logits/chosen": 0.37359899282455444,
137
+ "logits/rejected": 0.42109134793281555,
138
+ "logps/chosen": -188.8787841796875,
139
+ "logps/rejected": -192.15109252929688,
140
+ "loss": 0.6886,
141
+ "rewards/accuracies": 0.574999988079071,
142
+ "rewards/chosen": 0.0028547337278723717,
143
+ "rewards/margins": 0.009716801345348358,
144
+ "rewards/rejected": -0.0068620676174759865,
145
+ "step": 90
146
+ },
147
+ {
148
+ "epoch": 0.06898038370338436,
149
+ "grad_norm": 20.217604268172767,
150
+ "learning_rate": 6.827586206896552e-07,
151
+ "logits/chosen": 0.3505266308784485,
152
+ "logits/rejected": 0.350009024143219,
153
+ "logps/chosen": -196.3862762451172,
154
+ "logps/rejected": -201.52877807617188,
155
+ "loss": 0.6884,
156
+ "rewards/accuracies": 0.590624988079071,
157
+ "rewards/chosen": -0.0006909169023856521,
158
+ "rewards/margins": 0.010285623371601105,
159
+ "rewards/rejected": -0.010976539924740791,
160
+ "step": 100
161
+ },
162
+ {
163
+ "epoch": 0.07587842207372278,
164
+ "grad_norm": 21.217963404784477,
165
+ "learning_rate": 7.517241379310344e-07,
166
+ "logits/chosen": 0.33910346031188965,
167
+ "logits/rejected": 0.36434462666511536,
168
+ "logps/chosen": -188.9788360595703,
169
+ "logps/rejected": -190.0298309326172,
170
+ "loss": 0.6858,
171
+ "rewards/accuracies": 0.582812488079071,
172
+ "rewards/chosen": -0.0009895700495690107,
173
+ "rewards/margins": 0.015926657244563103,
174
+ "rewards/rejected": -0.016916226595640182,
175
+ "step": 110
176
+ },
177
+ {
178
+ "epoch": 0.08277646044406121,
179
+ "grad_norm": 21.31628057119455,
180
+ "learning_rate": 8.206896551724138e-07,
181
+ "logits/chosen": 0.34675049781799316,
182
+ "logits/rejected": 0.3582940101623535,
183
+ "logps/chosen": -187.06553649902344,
184
+ "logps/rejected": -194.0469207763672,
185
+ "loss": 0.6836,
186
+ "rewards/accuracies": 0.590624988079071,
187
+ "rewards/chosen": -0.003323513315990567,
188
+ "rewards/margins": 0.020813049748539925,
189
+ "rewards/rejected": -0.024136561900377274,
190
+ "step": 120
191
+ },
192
+ {
193
+ "epoch": 0.08967449881439965,
194
+ "grad_norm": 18.756215965537713,
195
+ "learning_rate": 8.896551724137931e-07,
196
+ "logits/chosen": 0.34192919731140137,
197
+ "logits/rejected": 0.3638426661491394,
198
+ "logps/chosen": -186.26242065429688,
199
+ "logps/rejected": -192.37039184570312,
200
+ "loss": 0.683,
201
+ "rewards/accuracies": 0.6000000238418579,
202
+ "rewards/chosen": -0.019915325567126274,
203
+ "rewards/margins": 0.022372394800186157,
204
+ "rewards/rejected": -0.04228772222995758,
205
+ "step": 130
206
+ },
207
+ {
208
+ "epoch": 0.09657253718473809,
209
+ "grad_norm": 21.634404693009117,
210
+ "learning_rate": 9.586206896551724e-07,
211
+ "logits/chosen": 0.355460524559021,
212
+ "logits/rejected": 0.3625187277793884,
213
+ "logps/chosen": -195.02239990234375,
214
+ "logps/rejected": -198.76058959960938,
215
+ "loss": 0.6762,
216
+ "rewards/accuracies": 0.6265625357627869,
217
+ "rewards/chosen": -0.029798131436109543,
218
+ "rewards/margins": 0.03854268416762352,
219
+ "rewards/rejected": -0.06834081560373306,
220
+ "step": 140
221
+ },
222
+ {
223
+ "epoch": 0.10347057555507652,
224
+ "grad_norm": 21.48669810581556,
225
+ "learning_rate": 9.969348659003832e-07,
226
+ "logits/chosen": 0.374904990196228,
227
+ "logits/rejected": 0.361512690782547,
228
+ "logps/chosen": -193.9099884033203,
229
+ "logps/rejected": -205.7383270263672,
230
+ "loss": 0.6703,
231
+ "rewards/accuracies": 0.6578125357627869,
232
+ "rewards/chosen": -0.05483241379261017,
233
+ "rewards/margins": 0.051752641797065735,
234
+ "rewards/rejected": -0.1065850630402565,
235
+ "step": 150
236
+ },
237
+ {
238
+ "epoch": 0.11036861392541496,
239
+ "grad_norm": 22.595411287408094,
240
+ "learning_rate": 9.89272030651341e-07,
241
+ "logits/chosen": 0.37401390075683594,
242
+ "logits/rejected": 0.3333270847797394,
243
+ "logps/chosen": -201.05963134765625,
244
+ "logps/rejected": -208.31690979003906,
245
+ "loss": 0.6615,
246
+ "rewards/accuracies": 0.6421875357627869,
247
+ "rewards/chosen": -0.08352852612733841,
248
+ "rewards/margins": 0.07251986861228943,
249
+ "rewards/rejected": -0.15604838728904724,
250
+ "step": 160
251
+ },
252
+ {
253
+ "epoch": 0.11726665229575339,
254
+ "grad_norm": 21.048436327513958,
255
+ "learning_rate": 9.816091954022988e-07,
256
+ "logits/chosen": 0.3516397178173065,
257
+ "logits/rejected": 0.38395583629608154,
258
+ "logps/chosen": -195.45803833007812,
259
+ "logps/rejected": -202.01185607910156,
260
+ "loss": 0.6544,
261
+ "rewards/accuracies": 0.676562488079071,
262
+ "rewards/chosen": -0.1025937870144844,
263
+ "rewards/margins": 0.09426098316907883,
264
+ "rewards/rejected": -0.19685477018356323,
265
+ "step": 170
266
+ },
267
+ {
268
+ "epoch": 0.12416469066609183,
269
+ "grad_norm": 21.699149046811495,
270
+ "learning_rate": 9.739463601532568e-07,
271
+ "logits/chosen": 0.3513750731945038,
272
+ "logits/rejected": 0.3493267297744751,
273
+ "logps/chosen": -199.25662231445312,
274
+ "logps/rejected": -204.52728271484375,
275
+ "loss": 0.6512,
276
+ "rewards/accuracies": 0.65625,
277
+ "rewards/chosen": -0.12922349572181702,
278
+ "rewards/margins": 0.10247690975666046,
279
+ "rewards/rejected": -0.23170042037963867,
280
+ "step": 180
281
+ },
282
+ {
283
+ "epoch": 0.13106272903643026,
284
+ "grad_norm": 22.02241376896688,
285
+ "learning_rate": 9.662835249042146e-07,
286
+ "logits/chosen": 0.3174246847629547,
287
+ "logits/rejected": 0.3541577160358429,
288
+ "logps/chosen": -198.79998779296875,
289
+ "logps/rejected": -206.08731079101562,
290
+ "loss": 0.655,
291
+ "rewards/accuracies": 0.651562511920929,
292
+ "rewards/chosen": -0.15049652755260468,
293
+ "rewards/margins": 0.10282564908266068,
294
+ "rewards/rejected": -0.25332218408584595,
295
+ "step": 190
296
+ },
297
+ {
298
+ "epoch": 0.13796076740676871,
299
+ "grad_norm": 19.928303872875155,
300
+ "learning_rate": 9.586206896551724e-07,
301
+ "logits/chosen": 0.2792571485042572,
302
+ "logits/rejected": 0.3482253849506378,
303
+ "logps/chosen": -196.61712646484375,
304
+ "logps/rejected": -194.27162170410156,
305
+ "loss": 0.6477,
306
+ "rewards/accuracies": 0.6390625238418579,
307
+ "rewards/chosen": -0.16095106303691864,
308
+ "rewards/margins": 0.12415014207363129,
309
+ "rewards/rejected": -0.2851012051105499,
310
+ "step": 200
311
+ },
312
+ {
313
+ "epoch": 0.14485880577710714,
314
+ "grad_norm": 24.92460334180119,
315
+ "learning_rate": 9.509578544061302e-07,
316
+ "logits/chosen": 0.30972471833229065,
317
+ "logits/rejected": 0.3551430404186249,
318
+ "logps/chosen": -197.7263946533203,
319
+ "logps/rejected": -201.88555908203125,
320
+ "loss": 0.6582,
321
+ "rewards/accuracies": 0.6093750596046448,
322
+ "rewards/chosen": -0.18753336369991302,
323
+ "rewards/margins": 0.11249043047428131,
324
+ "rewards/rejected": -0.30002379417419434,
325
+ "step": 210
326
+ },
327
+ {
328
+ "epoch": 0.15175684414744556,
329
+ "grad_norm": 22.79442118870038,
330
+ "learning_rate": 9.43295019157088e-07,
331
+ "logits/chosen": 0.3017464578151703,
332
+ "logits/rejected": 0.352105975151062,
333
+ "logps/chosen": -189.9367218017578,
334
+ "logps/rejected": -192.67840576171875,
335
+ "loss": 0.6466,
336
+ "rewards/accuracies": 0.6843750476837158,
337
+ "rewards/chosen": -0.20621374249458313,
338
+ "rewards/margins": 0.1424286663532257,
339
+ "rewards/rejected": -0.34864240884780884,
340
+ "step": 220
341
+ },
342
+ {
343
+ "epoch": 0.158654882517784,
344
+ "grad_norm": 21.705464795180195,
345
+ "learning_rate": 9.356321839080458e-07,
346
+ "logits/chosen": 0.2900081276893616,
347
+ "logits/rejected": 0.3325185179710388,
348
+ "logps/chosen": -192.8509979248047,
349
+ "logps/rejected": -200.14572143554688,
350
+ "loss": 0.6306,
351
+ "rewards/accuracies": 0.651562511920929,
352
+ "rewards/chosen": -0.20791535079479218,
353
+ "rewards/margins": 0.17991559207439423,
354
+ "rewards/rejected": -0.3878309428691864,
355
+ "step": 230
356
+ },
357
+ {
358
+ "epoch": 0.16555292088812243,
359
+ "grad_norm": 20.441645860866057,
360
+ "learning_rate": 9.279693486590037e-07,
361
+ "logits/chosen": 0.2543826401233673,
362
+ "logits/rejected": 0.2842496633529663,
363
+ "logps/chosen": -199.53659057617188,
364
+ "logps/rejected": -206.34771728515625,
365
+ "loss": 0.6324,
366
+ "rewards/accuracies": 0.65625,
367
+ "rewards/chosen": -0.26963937282562256,
368
+ "rewards/margins": 0.18849027156829834,
369
+ "rewards/rejected": -0.4581296145915985,
370
+ "step": 240
371
+ },
372
+ {
373
+ "epoch": 0.17245095925846088,
374
+ "grad_norm": 22.57587749280274,
375
+ "learning_rate": 9.203065134099616e-07,
376
+ "logits/chosen": 0.26432597637176514,
377
+ "logits/rejected": 0.31688380241394043,
378
+ "logps/chosen": -199.59571838378906,
379
+ "logps/rejected": -207.96337890625,
380
+ "loss": 0.6316,
381
+ "rewards/accuracies": 0.6640625596046448,
382
+ "rewards/chosen": -0.2876093089580536,
383
+ "rewards/margins": 0.19110387563705444,
384
+ "rewards/rejected": -0.47871318459510803,
385
+ "step": 250
386
+ },
387
+ {
388
+ "epoch": 0.1793489976287993,
389
+ "grad_norm": 21.526505800441665,
390
+ "learning_rate": 9.126436781609194e-07,
391
+ "logits/chosen": 0.3052281141281128,
392
+ "logits/rejected": 0.31890809535980225,
393
+ "logps/chosen": -190.8103790283203,
394
+ "logps/rejected": -197.42758178710938,
395
+ "loss": 0.6387,
396
+ "rewards/accuracies": 0.6484375,
397
+ "rewards/chosen": -0.3205464780330658,
398
+ "rewards/margins": 0.189984530210495,
399
+ "rewards/rejected": -0.5105310082435608,
400
+ "step": 260
401
+ },
402
+ {
403
+ "epoch": 0.18624703599913775,
404
+ "grad_norm": 24.080067203690575,
405
+ "learning_rate": 9.049808429118773e-07,
406
+ "logits/chosen": 0.32106345891952515,
407
+ "logits/rejected": 0.3163641095161438,
408
+ "logps/chosen": -193.92506408691406,
409
+ "logps/rejected": -199.49679565429688,
410
+ "loss": 0.6228,
411
+ "rewards/accuracies": 0.6796875,
412
+ "rewards/chosen": -0.3592388927936554,
413
+ "rewards/margins": 0.21608832478523254,
414
+ "rewards/rejected": -0.5753272771835327,
415
+ "step": 270
416
+ },
417
+ {
418
+ "epoch": 0.19314507436947617,
419
+ "grad_norm": 22.934282465553025,
420
+ "learning_rate": 8.973180076628351e-07,
421
+ "logits/chosen": 0.29369789361953735,
422
+ "logits/rejected": 0.29175543785095215,
423
+ "logps/chosen": -202.85293579101562,
424
+ "logps/rejected": -212.9384765625,
425
+ "loss": 0.6144,
426
+ "rewards/accuracies": 0.6843750476837158,
427
+ "rewards/chosen": -0.37743711471557617,
428
+ "rewards/margins": 0.27061423659324646,
429
+ "rewards/rejected": -0.6480513215065002,
430
+ "step": 280
431
+ },
432
+ {
433
+ "epoch": 0.20004311273981462,
434
+ "grad_norm": 25.800311412575947,
435
+ "learning_rate": 8.896551724137931e-07,
436
+ "logits/chosen": 0.2707730233669281,
437
+ "logits/rejected": 0.3287985324859619,
438
+ "logps/chosen": -193.82411193847656,
439
+ "logps/rejected": -205.94606018066406,
440
+ "loss": 0.6053,
441
+ "rewards/accuracies": 0.6890625357627869,
442
+ "rewards/chosen": -0.40112268924713135,
443
+ "rewards/margins": 0.27477550506591797,
444
+ "rewards/rejected": -0.6758981943130493,
445
+ "step": 290
446
+ },
447
+ {
448
+ "epoch": 0.20694115111015304,
449
+ "grad_norm": 25.421379038162744,
450
+ "learning_rate": 8.81992337164751e-07,
451
+ "logits/chosen": 0.2867887020111084,
452
+ "logits/rejected": 0.2730118930339813,
453
+ "logps/chosen": -211.45458984375,
454
+ "logps/rejected": -224.3688507080078,
455
+ "loss": 0.6032,
456
+ "rewards/accuracies": 0.6812500357627869,
457
+ "rewards/chosen": -0.4323672950267792,
458
+ "rewards/margins": 0.311962366104126,
459
+ "rewards/rejected": -0.7443296909332275,
460
+ "step": 300
461
+ },
462
+ {
463
+ "epoch": 0.2138391894804915,
464
+ "grad_norm": 23.357935776008553,
465
+ "learning_rate": 8.743295019157088e-07,
466
+ "logits/chosen": 0.30384495854377747,
467
+ "logits/rejected": 0.31480729579925537,
468
+ "logps/chosen": -197.1667022705078,
469
+ "logps/rejected": -206.29144287109375,
470
+ "loss": 0.6074,
471
+ "rewards/accuracies": 0.6875,
472
+ "rewards/chosen": -0.49985194206237793,
473
+ "rewards/margins": 0.2933136820793152,
474
+ "rewards/rejected": -0.7931656241416931,
475
+ "step": 310
476
+ },
477
+ {
478
+ "epoch": 0.22073722785082991,
479
+ "grad_norm": 20.985500688578327,
480
+ "learning_rate": 8.666666666666667e-07,
481
+ "logits/chosen": 0.32146862149238586,
482
+ "logits/rejected": 0.38823962211608887,
483
+ "logps/chosen": -196.0064697265625,
484
+ "logps/rejected": -210.33905029296875,
485
+ "loss": 0.6047,
486
+ "rewards/accuracies": 0.7000000476837158,
487
+ "rewards/chosen": -0.5386682748794556,
488
+ "rewards/margins": 0.3213097155094147,
489
+ "rewards/rejected": -0.8599780201911926,
490
+ "step": 320
491
+ },
492
+ {
493
+ "epoch": 0.22763526622116836,
494
+ "grad_norm": 19.782650147196403,
495
+ "learning_rate": 8.590038314176245e-07,
496
+ "logits/chosen": 0.3865478038787842,
497
+ "logits/rejected": 0.4090644121170044,
498
+ "logps/chosen": -204.63607788085938,
499
+ "logps/rejected": -212.3304443359375,
500
+ "loss": 0.6019,
501
+ "rewards/accuracies": 0.6734375357627869,
502
+ "rewards/chosen": -0.6245216131210327,
503
+ "rewards/margins": 0.36608007550239563,
504
+ "rewards/rejected": -0.990601658821106,
505
+ "step": 330
506
+ },
507
+ {
508
+ "epoch": 0.23453330459150679,
509
+ "grad_norm": 22.660878566995944,
510
+ "learning_rate": 8.513409961685824e-07,
511
+ "logits/chosen": 0.33020085096359253,
512
+ "logits/rejected": 0.3953076899051666,
513
+ "logps/chosen": -198.2565460205078,
514
+ "logps/rejected": -208.3692169189453,
515
+ "loss": 0.6038,
516
+ "rewards/accuracies": 0.6765625476837158,
517
+ "rewards/chosen": -0.5995591878890991,
518
+ "rewards/margins": 0.3347458839416504,
519
+ "rewards/rejected": -0.9343050718307495,
520
+ "step": 340
521
+ },
522
+ {
523
+ "epoch": 0.24143134296184524,
524
+ "grad_norm": 23.697173711397785,
525
+ "learning_rate": 8.436781609195402e-07,
526
+ "logits/chosen": 0.2802509069442749,
527
+ "logits/rejected": 0.33361196517944336,
528
+ "logps/chosen": -199.09048461914062,
529
+ "logps/rejected": -209.76051330566406,
530
+ "loss": 0.5838,
531
+ "rewards/accuracies": 0.7093750238418579,
532
+ "rewards/chosen": -0.5859872102737427,
533
+ "rewards/margins": 0.37135303020477295,
534
+ "rewards/rejected": -0.9573402404785156,
535
+ "step": 350
536
+ },
537
+ {
538
+ "epoch": 0.24832938133218366,
539
+ "grad_norm": 21.021696982453527,
540
+ "learning_rate": 8.360153256704981e-07,
541
+ "logits/chosen": 0.3388012647628784,
542
+ "logits/rejected": 0.3767550587654114,
543
+ "logps/chosen": -197.89669799804688,
544
+ "logps/rejected": -207.66709899902344,
545
+ "loss": 0.5962,
546
+ "rewards/accuracies": 0.682812511920929,
547
+ "rewards/chosen": -0.5828956961631775,
548
+ "rewards/margins": 0.3945348858833313,
549
+ "rewards/rejected": -0.9774305820465088,
550
+ "step": 360
551
+ },
552
+ {
553
+ "epoch": 0.2552274197025221,
554
+ "grad_norm": 21.543666601248262,
555
+ "learning_rate": 8.28352490421456e-07,
556
+ "logits/chosen": 0.30326756834983826,
557
+ "logits/rejected": 0.3772284984588623,
558
+ "logps/chosen": -196.38209533691406,
559
+ "logps/rejected": -208.34056091308594,
560
+ "loss": 0.5912,
561
+ "rewards/accuracies": 0.6859375238418579,
562
+ "rewards/chosen": -0.5436594486236572,
563
+ "rewards/margins": 0.37626004219055176,
564
+ "rewards/rejected": -0.919919490814209,
565
+ "step": 370
566
+ },
567
+ {
568
+ "epoch": 0.26212545807286053,
569
+ "grad_norm": 21.02248262252068,
570
+ "learning_rate": 8.206896551724138e-07,
571
+ "logits/chosen": 0.3109130859375,
572
+ "logits/rejected": 0.28564298152923584,
573
+ "logps/chosen": -204.91912841796875,
574
+ "logps/rejected": -221.65782165527344,
575
+ "loss": 0.5811,
576
+ "rewards/accuracies": 0.7000000476837158,
577
+ "rewards/chosen": -0.5830342173576355,
578
+ "rewards/margins": 0.42591309547424316,
579
+ "rewards/rejected": -1.0089473724365234,
580
+ "step": 380
581
+ },
582
+ {
583
+ "epoch": 0.26902349644319895,
584
+ "grad_norm": 21.574655386442398,
585
+ "learning_rate": 8.130268199233717e-07,
586
+ "logits/chosen": 0.3019198477268219,
587
+ "logits/rejected": 0.321940153837204,
588
+ "logps/chosen": -207.87794494628906,
589
+ "logps/rejected": -218.37860107421875,
590
+ "loss": 0.5902,
591
+ "rewards/accuracies": 0.7000000476837158,
592
+ "rewards/chosen": -0.5571427345275879,
593
+ "rewards/margins": 0.3800555467605591,
594
+ "rewards/rejected": -0.937198281288147,
595
+ "step": 390
596
+ },
597
+ {
598
+ "epoch": 0.27592153481353743,
599
+ "grad_norm": 23.222399130782183,
600
+ "learning_rate": 8.053639846743294e-07,
601
+ "logits/chosen": 0.2715260088443756,
602
+ "logits/rejected": 0.28244683146476746,
603
+ "logps/chosen": -214.16189575195312,
604
+ "logps/rejected": -226.54576110839844,
605
+ "loss": 0.5922,
606
+ "rewards/accuracies": 0.699999988079071,
607
+ "rewards/chosen": -0.6588503122329712,
608
+ "rewards/margins": 0.4110034704208374,
609
+ "rewards/rejected": -1.0698537826538086,
610
+ "step": 400
611
+ },
612
+ {
613
+ "epoch": 0.28281957318387585,
614
+ "grad_norm": 20.668994395434144,
615
+ "learning_rate": 7.977011494252873e-07,
616
+ "logits/chosen": 0.2664548456668854,
617
+ "logits/rejected": 0.29690778255462646,
618
+ "logps/chosen": -204.0004425048828,
619
+ "logps/rejected": -213.70797729492188,
620
+ "loss": 0.5902,
621
+ "rewards/accuracies": 0.6937500238418579,
622
+ "rewards/chosen": -0.6476739645004272,
623
+ "rewards/margins": 0.4224002957344055,
624
+ "rewards/rejected": -1.070074200630188,
625
+ "step": 410
626
+ },
627
+ {
628
+ "epoch": 0.28971761155421427,
629
+ "grad_norm": 23.387477174221438,
630
+ "learning_rate": 7.900383141762452e-07,
631
+ "logits/chosen": 0.26732271909713745,
632
+ "logits/rejected": 0.3153986930847168,
633
+ "logps/chosen": -196.22264099121094,
634
+ "logps/rejected": -206.97850036621094,
635
+ "loss": 0.5791,
636
+ "rewards/accuracies": 0.6843750476837158,
637
+ "rewards/chosen": -0.5797644853591919,
638
+ "rewards/margins": 0.4164075255393982,
639
+ "rewards/rejected": -0.9961719512939453,
640
+ "step": 420
641
+ },
642
+ {
643
+ "epoch": 0.2966156499245527,
644
+ "grad_norm": 21.18326793123928,
645
+ "learning_rate": 7.82375478927203e-07,
646
+ "logits/chosen": 0.21811504662036896,
647
+ "logits/rejected": 0.2583531439304352,
648
+ "logps/chosen": -207.00149536132812,
649
+ "logps/rejected": -219.3418731689453,
650
+ "loss": 0.5384,
651
+ "rewards/accuracies": 0.7375000715255737,
652
+ "rewards/chosen": -0.5487433671951294,
653
+ "rewards/margins": 0.535973072052002,
654
+ "rewards/rejected": -1.0847163200378418,
655
+ "step": 430
656
+ },
657
+ {
658
+ "epoch": 0.3035136882948911,
659
+ "grad_norm": 26.940070706652882,
660
+ "learning_rate": 7.747126436781609e-07,
661
+ "logits/chosen": 0.21293707191944122,
662
+ "logits/rejected": 0.22856633365154266,
663
+ "logps/chosen": -208.216796875,
664
+ "logps/rejected": -220.22528076171875,
665
+ "loss": 0.5753,
666
+ "rewards/accuracies": 0.7046875357627869,
667
+ "rewards/chosen": -0.6004849076271057,
668
+ "rewards/margins": 0.4932917356491089,
669
+ "rewards/rejected": -1.0937767028808594,
670
+ "step": 440
671
+ },
672
+ {
673
+ "epoch": 0.3104117266652296,
674
+ "grad_norm": 17.108456205097973,
675
+ "learning_rate": 7.670498084291187e-07,
676
+ "logits/chosen": 0.21750858426094055,
677
+ "logits/rejected": 0.27465111017227173,
678
+ "logps/chosen": -200.55621337890625,
679
+ "logps/rejected": -210.05360412597656,
680
+ "loss": 0.5473,
681
+ "rewards/accuracies": 0.7406250238418579,
682
+ "rewards/chosen": -0.588002622127533,
683
+ "rewards/margins": 0.5616728067398071,
684
+ "rewards/rejected": -1.1496754884719849,
685
+ "step": 450
686
+ },
687
+ {
688
+ "epoch": 0.317309765035568,
689
+ "grad_norm": 25.016877550394028,
690
+ "learning_rate": 7.593869731800766e-07,
691
+ "logits/chosen": 0.15050889551639557,
692
+ "logits/rejected": 0.24773681163787842,
693
+ "logps/chosen": -201.76043701171875,
694
+ "logps/rejected": -215.86187744140625,
695
+ "loss": 0.5984,
696
+ "rewards/accuracies": 0.7140625715255737,
697
+ "rewards/chosen": -0.6779018044471741,
698
+ "rewards/margins": 0.470897912979126,
699
+ "rewards/rejected": -1.1487996578216553,
700
+ "step": 460
701
+ },
702
+ {
703
+ "epoch": 0.32420780340590644,
704
+ "grad_norm": 20.958071725485514,
705
+ "learning_rate": 7.517241379310344e-07,
706
+ "logits/chosen": 0.1510736346244812,
707
+ "logits/rejected": 0.24761702120304108,
708
+ "logps/chosen": -208.5487060546875,
709
+ "logps/rejected": -216.71127319335938,
710
+ "loss": 0.552,
711
+ "rewards/accuracies": 0.7234375476837158,
712
+ "rewards/chosen": -0.6728602647781372,
713
+ "rewards/margins": 0.5733956098556519,
714
+ "rewards/rejected": -1.246255874633789,
715
+ "step": 470
716
+ },
717
+ {
718
+ "epoch": 0.33110584177624486,
719
+ "grad_norm": 21.914142386799554,
720
+ "learning_rate": 7.440613026819923e-07,
721
+ "logits/chosen": 0.2657942473888397,
722
+ "logits/rejected": 0.28262150287628174,
723
+ "logps/chosen": -200.79815673828125,
724
+ "logps/rejected": -213.93600463867188,
725
+ "loss": 0.5457,
726
+ "rewards/accuracies": 0.7156250476837158,
727
+ "rewards/chosen": -0.7010958790779114,
728
+ "rewards/margins": 0.5760525465011597,
729
+ "rewards/rejected": -1.2771484851837158,
730
+ "step": 480
731
+ },
732
+ {
733
+ "epoch": 0.33800388014658334,
734
+ "grad_norm": 20.691760332158143,
735
+ "learning_rate": 7.363984674329502e-07,
736
+ "logits/chosen": 0.1785193532705307,
737
+ "logits/rejected": 0.2651832401752472,
738
+ "logps/chosen": -207.3889617919922,
739
+ "logps/rejected": -216.79388427734375,
740
+ "loss": 0.5569,
741
+ "rewards/accuracies": 0.7093750238418579,
742
+ "rewards/chosen": -0.7134870886802673,
743
+ "rewards/margins": 0.542286217212677,
744
+ "rewards/rejected": -1.2557734251022339,
745
+ "step": 490
746
+ },
747
+ {
748
+ "epoch": 0.34490191851692176,
749
+ "grad_norm": 23.474962325162984,
750
+ "learning_rate": 7.28735632183908e-07,
751
+ "logits/chosen": 0.1998932659626007,
752
+ "logits/rejected": 0.24436387419700623,
753
+ "logps/chosen": -207.51589965820312,
754
+ "logps/rejected": -217.1973419189453,
755
+ "loss": 0.5488,
756
+ "rewards/accuracies": 0.7265625,
757
+ "rewards/chosen": -0.7165147066116333,
758
+ "rewards/margins": 0.6149377226829529,
759
+ "rewards/rejected": -1.3314523696899414,
760
+ "step": 500
761
+ },
762
+ {
763
+ "epoch": 0.3517999568872602,
764
+ "grad_norm": 22.888075753667685,
765
+ "learning_rate": 7.210727969348659e-07,
766
+ "logits/chosen": 0.17916660010814667,
767
+ "logits/rejected": 0.2139005810022354,
768
+ "logps/chosen": -206.5091094970703,
769
+ "logps/rejected": -218.1505584716797,
770
+ "loss": 0.568,
771
+ "rewards/accuracies": 0.6859375238418579,
772
+ "rewards/chosen": -0.7650787830352783,
773
+ "rewards/margins": 0.5644460916519165,
774
+ "rewards/rejected": -1.3295247554779053,
775
+ "step": 510
776
+ },
777
+ {
778
+ "epoch": 0.3586979952575986,
779
+ "grad_norm": 26.102732396040366,
780
+ "learning_rate": 7.134099616858237e-07,
781
+ "logits/chosen": 0.19337040185928345,
782
+ "logits/rejected": 0.17022080719470978,
783
+ "logps/chosen": -210.6927947998047,
784
+ "logps/rejected": -223.4407501220703,
785
+ "loss": 0.5676,
786
+ "rewards/accuracies": 0.723437488079071,
787
+ "rewards/chosen": -0.754781186580658,
788
+ "rewards/margins": 0.596481442451477,
789
+ "rewards/rejected": -1.3512626886367798,
790
+ "step": 520
791
+ },
792
+ {
793
+ "epoch": 0.3655960336279371,
794
+ "grad_norm": 22.211123954654255,
795
+ "learning_rate": 7.057471264367816e-07,
796
+ "logits/chosen": 0.17498114705085754,
797
+ "logits/rejected": 0.21647687256336212,
798
+ "logps/chosen": -204.9669647216797,
799
+ "logps/rejected": -220.52435302734375,
800
+ "loss": 0.5301,
801
+ "rewards/accuracies": 0.7234375476837158,
802
+ "rewards/chosen": -0.6388720273971558,
803
+ "rewards/margins": 0.6742045879364014,
804
+ "rewards/rejected": -1.3130766153335571,
805
+ "step": 530
806
+ },
807
+ {
808
+ "epoch": 0.3724940719982755,
809
+ "grad_norm": 19.802648080199884,
810
+ "learning_rate": 6.980842911877395e-07,
811
+ "logits/chosen": 0.19523748755455017,
812
+ "logits/rejected": 0.21661296486854553,
813
+ "logps/chosen": -204.8290557861328,
814
+ "logps/rejected": -221.68118286132812,
815
+ "loss": 0.5549,
816
+ "rewards/accuracies": 0.737500011920929,
817
+ "rewards/chosen": -0.6770638227462769,
818
+ "rewards/margins": 0.6107801198959351,
819
+ "rewards/rejected": -1.287843942642212,
820
+ "step": 540
821
+ },
822
+ {
823
+ "epoch": 0.3793921103686139,
824
+ "grad_norm": 21.39010759759459,
825
+ "learning_rate": 6.904214559386973e-07,
826
+ "logits/chosen": 0.20838454365730286,
827
+ "logits/rejected": 0.21981817483901978,
828
+ "logps/chosen": -206.594482421875,
829
+ "logps/rejected": -226.77719116210938,
830
+ "loss": 0.5197,
831
+ "rewards/accuracies": 0.734375,
832
+ "rewards/chosen": -0.6928226351737976,
833
+ "rewards/margins": 0.7122121453285217,
834
+ "rewards/rejected": -1.4050347805023193,
835
+ "step": 550
836
+ },
837
+ {
838
+ "epoch": 0.38629014873895234,
839
+ "grad_norm": 24.388542970501526,
840
+ "learning_rate": 6.827586206896552e-07,
841
+ "logits/chosen": 0.21371379494667053,
842
+ "logits/rejected": 0.2217874974012375,
843
+ "logps/chosen": -203.6668701171875,
844
+ "logps/rejected": -220.09390258789062,
845
+ "loss": 0.5603,
846
+ "rewards/accuracies": 0.7312500476837158,
847
+ "rewards/chosen": -0.7243704199790955,
848
+ "rewards/margins": 0.5787999629974365,
849
+ "rewards/rejected": -1.3031704425811768,
850
+ "step": 560
851
+ },
852
+ {
853
+ "epoch": 0.3931881871092908,
854
+ "grad_norm": 18.31272079122668,
855
+ "learning_rate": 6.750957854406129e-07,
856
+ "logits/chosen": 0.19194141030311584,
857
+ "logits/rejected": 0.23956988751888275,
858
+ "logps/chosen": -200.03921508789062,
859
+ "logps/rejected": -220.68844604492188,
860
+ "loss": 0.5324,
861
+ "rewards/accuracies": 0.7406250238418579,
862
+ "rewards/chosen": -0.6924073696136475,
863
+ "rewards/margins": 0.7310079336166382,
864
+ "rewards/rejected": -1.4234153032302856,
865
+ "step": 570
866
+ },
867
+ {
868
+ "epoch": 0.40008622547962924,
869
+ "grad_norm": 23.278396001880992,
870
+ "learning_rate": 6.674329501915708e-07,
871
+ "logits/chosen": 0.18476101756095886,
872
+ "logits/rejected": 0.3003040552139282,
873
+ "logps/chosen": -212.16586303710938,
874
+ "logps/rejected": -221.8807830810547,
875
+ "loss": 0.5603,
876
+ "rewards/accuracies": 0.7265625,
877
+ "rewards/chosen": -0.7272917032241821,
878
+ "rewards/margins": 0.594864010810852,
879
+ "rewards/rejected": -1.3221557140350342,
880
+ "step": 580
881
+ },
882
+ {
883
+ "epoch": 0.40698426384996766,
884
+ "grad_norm": 22.774833668061657,
885
+ "learning_rate": 6.597701149425286e-07,
886
+ "logits/chosen": 0.27348437905311584,
887
+ "logits/rejected": 0.29119783639907837,
888
+ "logps/chosen": -208.50161743164062,
889
+ "logps/rejected": -223.21377563476562,
890
+ "loss": 0.5281,
891
+ "rewards/accuracies": 0.7437500357627869,
892
+ "rewards/chosen": -0.7116725444793701,
893
+ "rewards/margins": 0.6606653332710266,
894
+ "rewards/rejected": -1.3723379373550415,
895
+ "step": 590
896
+ },
897
+ {
898
+ "epoch": 0.4138823022203061,
899
+ "grad_norm": 24.57400616632902,
900
+ "learning_rate": 6.521072796934865e-07,
901
+ "logits/chosen": 0.20140346884727478,
902
+ "logits/rejected": 0.233355313539505,
903
+ "logps/chosen": -197.4592742919922,
904
+ "logps/rejected": -212.82032775878906,
905
+ "loss": 0.5235,
906
+ "rewards/accuracies": 0.729687511920929,
907
+ "rewards/chosen": -0.6597610712051392,
908
+ "rewards/margins": 0.7397715449333191,
909
+ "rewards/rejected": -1.3995325565338135,
910
+ "step": 600
911
+ },
912
+ {
913
+ "epoch": 0.4207803405906445,
914
+ "grad_norm": 21.41678620311021,
915
+ "learning_rate": 6.444444444444444e-07,
916
+ "logits/chosen": 0.2271338552236557,
917
+ "logits/rejected": 0.2577861547470093,
918
+ "logps/chosen": -210.82029724121094,
919
+ "logps/rejected": -230.77545166015625,
920
+ "loss": 0.5239,
921
+ "rewards/accuracies": 0.7515624761581421,
922
+ "rewards/chosen": -0.7849064469337463,
923
+ "rewards/margins": 0.7753893136978149,
924
+ "rewards/rejected": -1.560295820236206,
925
+ "step": 610
926
+ },
927
+ {
928
+ "epoch": 0.427678378960983,
929
+ "grad_norm": 27.454507207142147,
930
+ "learning_rate": 6.367816091954022e-07,
931
+ "logits/chosen": 0.2676536440849304,
932
+ "logits/rejected": 0.2785859704017639,
933
+ "logps/chosen": -200.918701171875,
934
+ "logps/rejected": -218.0684814453125,
935
+ "loss": 0.5296,
936
+ "rewards/accuracies": 0.753125011920929,
937
+ "rewards/chosen": -0.7359431982040405,
938
+ "rewards/margins": 0.7085081338882446,
939
+ "rewards/rejected": -1.4444513320922852,
940
+ "step": 620
941
+ },
942
+ {
943
+ "epoch": 0.4345764173313214,
944
+ "grad_norm": 23.055409383831122,
945
+ "learning_rate": 6.291187739463601e-07,
946
+ "logits/chosen": 0.22078415751457214,
947
+ "logits/rejected": 0.286234587430954,
948
+ "logps/chosen": -209.9963836669922,
949
+ "logps/rejected": -228.6524658203125,
950
+ "loss": 0.5234,
951
+ "rewards/accuracies": 0.7515624761581421,
952
+ "rewards/chosen": -0.7776533961296082,
953
+ "rewards/margins": 0.7566480040550232,
954
+ "rewards/rejected": -1.5343014001846313,
955
+ "step": 630
956
+ },
957
+ {
958
+ "epoch": 0.44147445570165983,
959
+ "grad_norm": 20.425933679340293,
960
+ "learning_rate": 6.214559386973179e-07,
961
+ "logits/chosen": 0.2468743622303009,
962
+ "logits/rejected": 0.27129438519477844,
963
+ "logps/chosen": -207.75270080566406,
964
+ "logps/rejected": -220.77560424804688,
965
+ "loss": 0.5165,
966
+ "rewards/accuracies": 0.7406250238418579,
967
+ "rewards/chosen": -0.8151954412460327,
968
+ "rewards/margins": 0.7754901647567749,
969
+ "rewards/rejected": -1.5906856060028076,
970
+ "step": 640
971
+ },
972
+ {
973
+ "epoch": 0.44837249407199825,
974
+ "grad_norm": 23.41797315632108,
975
+ "learning_rate": 6.137931034482758e-07,
976
+ "logits/chosen": 0.15889199078083038,
977
+ "logits/rejected": 0.2550981938838959,
978
+ "logps/chosen": -209.88893127441406,
979
+ "logps/rejected": -223.1925506591797,
980
+ "loss": 0.5388,
981
+ "rewards/accuracies": 0.7250000238418579,
982
+ "rewards/chosen": -0.8657385110855103,
983
+ "rewards/margins": 0.6958652138710022,
984
+ "rewards/rejected": -1.5616039037704468,
985
+ "step": 650
986
+ },
987
+ {
988
+ "epoch": 0.45527053244233673,
989
+ "grad_norm": 26.006440186662328,
990
+ "learning_rate": 6.061302681992337e-07,
991
+ "logits/chosen": 0.2198886126279831,
992
+ "logits/rejected": 0.24105188250541687,
993
+ "logps/chosen": -211.81887817382812,
994
+ "logps/rejected": -229.9445037841797,
995
+ "loss": 0.5558,
996
+ "rewards/accuracies": 0.745312511920929,
997
+ "rewards/chosen": -0.9279370903968811,
998
+ "rewards/margins": 0.6996958255767822,
999
+ "rewards/rejected": -1.627632737159729,
1000
+ "step": 660
1001
+ },
1002
+ {
1003
+ "epoch": 0.46216857081267515,
1004
+ "grad_norm": 24.986970918973174,
1005
+ "learning_rate": 5.984674329501915e-07,
1006
+ "logits/chosen": 0.21261385083198547,
1007
+ "logits/rejected": 0.23078909516334534,
1008
+ "logps/chosen": -197.61631774902344,
1009
+ "logps/rejected": -221.3162384033203,
1010
+ "loss": 0.5195,
1011
+ "rewards/accuracies": 0.760937511920929,
1012
+ "rewards/chosen": -0.8430317640304565,
1013
+ "rewards/margins": 0.8105250597000122,
1014
+ "rewards/rejected": -1.6535569429397583,
1015
+ "step": 670
1016
+ },
1017
+ {
1018
+ "epoch": 0.46906660918301357,
1019
+ "grad_norm": 18.699804314432686,
1020
+ "learning_rate": 5.908045977011494e-07,
1021
+ "logits/chosen": 0.2194710671901703,
1022
+ "logits/rejected": 0.2245461493730545,
1023
+ "logps/chosen": -206.41171264648438,
1024
+ "logps/rejected": -225.4470977783203,
1025
+ "loss": 0.5034,
1026
+ "rewards/accuracies": 0.7640625238418579,
1027
+ "rewards/chosen": -0.8391079902648926,
1028
+ "rewards/margins": 0.8192622065544128,
1029
+ "rewards/rejected": -1.6583701372146606,
1030
+ "step": 680
1031
+ },
1032
+ {
1033
+ "epoch": 0.475964647553352,
1034
+ "grad_norm": 27.943474512199224,
1035
+ "learning_rate": 5.831417624521072e-07,
1036
+ "logits/chosen": 0.17129182815551758,
1037
+ "logits/rejected": 0.26508453488349915,
1038
+ "logps/chosen": -197.73431396484375,
1039
+ "logps/rejected": -210.53427124023438,
1040
+ "loss": 0.5545,
1041
+ "rewards/accuracies": 0.71875,
1042
+ "rewards/chosen": -0.7708035707473755,
1043
+ "rewards/margins": 0.7140544056892395,
1044
+ "rewards/rejected": -1.4848580360412598,
1045
+ "step": 690
1046
+ },
1047
+ {
1048
+ "epoch": 0.48286268592369047,
1049
+ "grad_norm": 18.799210721144906,
1050
+ "learning_rate": 5.754789272030651e-07,
1051
+ "logits/chosen": 0.20475523173809052,
1052
+ "logits/rejected": 0.3040182292461395,
1053
+ "logps/chosen": -213.24290466308594,
1054
+ "logps/rejected": -230.26467895507812,
1055
+ "loss": 0.5363,
1056
+ "rewards/accuracies": 0.739062488079071,
1057
+ "rewards/chosen": -0.8690279722213745,
1058
+ "rewards/margins": 0.7869499325752258,
1059
+ "rewards/rejected": -1.6559779644012451,
1060
+ "step": 700
1061
+ },
1062
+ {
1063
+ "epoch": 0.4897607242940289,
1064
+ "grad_norm": 25.644011365351712,
1065
+ "learning_rate": 5.678160919540229e-07,
1066
+ "logits/chosen": 0.16487537324428558,
1067
+ "logits/rejected": 0.2230132669210434,
1068
+ "logps/chosen": -205.01168823242188,
1069
+ "logps/rejected": -218.5323486328125,
1070
+ "loss": 0.5603,
1071
+ "rewards/accuracies": 0.6921875476837158,
1072
+ "rewards/chosen": -0.9361888766288757,
1073
+ "rewards/margins": 0.6677310466766357,
1074
+ "rewards/rejected": -1.6039198637008667,
1075
+ "step": 710
1076
+ },
1077
+ {
1078
+ "epoch": 0.4966587626643673,
1079
+ "grad_norm": 22.153532795458556,
1080
+ "learning_rate": 5.601532567049809e-07,
1081
+ "logits/chosen": 0.19576019048690796,
1082
+ "logits/rejected": 0.27333715558052063,
1083
+ "logps/chosen": -200.7130889892578,
1084
+ "logps/rejected": -219.6556854248047,
1085
+ "loss": 0.5005,
1086
+ "rewards/accuracies": 0.768750011920929,
1087
+ "rewards/chosen": -0.8190287947654724,
1088
+ "rewards/margins": 0.8135865330696106,
1089
+ "rewards/rejected": -1.632615327835083,
1090
+ "step": 720
1091
+ },
1092
+ {
1093
+ "epoch": 0.5035568010347058,
1094
+ "grad_norm": 21.512930303082918,
1095
+ "learning_rate": 5.524904214559388e-07,
1096
+ "logits/chosen": 0.1499469131231308,
1097
+ "logits/rejected": 0.20389853417873383,
1098
+ "logps/chosen": -206.646484375,
1099
+ "logps/rejected": -228.37503051757812,
1100
+ "loss": 0.5219,
1101
+ "rewards/accuracies": 0.745312511920929,
1102
+ "rewards/chosen": -0.88575679063797,
1103
+ "rewards/margins": 0.7930079698562622,
1104
+ "rewards/rejected": -1.6787649393081665,
1105
+ "step": 730
1106
+ },
1107
+ {
1108
+ "epoch": 0.5104548394050442,
1109
+ "grad_norm": 24.61147206437457,
1110
+ "learning_rate": 5.448275862068966e-07,
1111
+ "logits/chosen": 0.18222621083259583,
1112
+ "logits/rejected": 0.22593136131763458,
1113
+ "logps/chosen": -216.77786254882812,
1114
+ "logps/rejected": -238.52584838867188,
1115
+ "loss": 0.4984,
1116
+ "rewards/accuracies": 0.746874988079071,
1117
+ "rewards/chosen": -0.9311150312423706,
1118
+ "rewards/margins": 0.8666337132453918,
1119
+ "rewards/rejected": -1.7977488040924072,
1120
+ "step": 740
1121
+ },
1122
+ {
1123
+ "epoch": 0.5173528777753826,
1124
+ "grad_norm": 24.318520131273452,
1125
+ "learning_rate": 5.371647509578544e-07,
1126
+ "logits/chosen": 0.18827559053897858,
1127
+ "logits/rejected": 0.24765178561210632,
1128
+ "logps/chosen": -201.89991760253906,
1129
+ "logps/rejected": -226.2533416748047,
1130
+ "loss": 0.4902,
1131
+ "rewards/accuracies": 0.7843749523162842,
1132
+ "rewards/chosen": -0.8815733790397644,
1133
+ "rewards/margins": 0.9414010047912598,
1134
+ "rewards/rejected": -1.822974443435669,
1135
+ "step": 750
1136
+ },
1137
+ {
1138
+ "epoch": 0.5242509161457211,
1139
+ "grad_norm": 38.718538073738564,
1140
+ "learning_rate": 5.295019157088122e-07,
1141
+ "logits/chosen": 0.17381815612316132,
1142
+ "logits/rejected": 0.2428828477859497,
1143
+ "logps/chosen": -206.8114013671875,
1144
+ "logps/rejected": -225.81219482421875,
1145
+ "loss": 0.511,
1146
+ "rewards/accuracies": 0.762499988079071,
1147
+ "rewards/chosen": -1.0074797868728638,
1148
+ "rewards/margins": 0.8152840733528137,
1149
+ "rewards/rejected": -1.8227639198303223,
1150
+ "step": 760
1151
+ },
1152
+ {
1153
+ "epoch": 0.5311489545160595,
1154
+ "grad_norm": 20.957009249774774,
1155
+ "learning_rate": 5.218390804597701e-07,
1156
+ "logits/chosen": 0.11571212112903595,
1157
+ "logits/rejected": 0.18467916548252106,
1158
+ "logps/chosen": -216.8351287841797,
1159
+ "logps/rejected": -235.50743103027344,
1160
+ "loss": 0.4868,
1161
+ "rewards/accuracies": 0.7749999761581421,
1162
+ "rewards/chosen": -0.9625260233879089,
1163
+ "rewards/margins": 0.9064898490905762,
1164
+ "rewards/rejected": -1.8690159320831299,
1165
+ "step": 770
1166
+ },
1167
+ {
1168
+ "epoch": 0.5380469928863979,
1169
+ "grad_norm": 25.56373586600605,
1170
+ "learning_rate": 5.14176245210728e-07,
1171
+ "logits/chosen": 0.20467066764831543,
1172
+ "logits/rejected": 0.1702248752117157,
1173
+ "logps/chosen": -211.35308837890625,
1174
+ "logps/rejected": -236.63363647460938,
1175
+ "loss": 0.5033,
1176
+ "rewards/accuracies": 0.745312511920929,
1177
+ "rewards/chosen": -1.0503700971603394,
1178
+ "rewards/margins": 0.8642019033432007,
1179
+ "rewards/rejected": -1.91457200050354,
1180
+ "step": 780
1181
+ },
1182
+ {
1183
+ "epoch": 0.5449450312567363,
1184
+ "grad_norm": 24.493933489793076,
1185
+ "learning_rate": 5.065134099616858e-07,
1186
+ "logits/chosen": 0.13319827616214752,
1187
+ "logits/rejected": 0.18873748183250427,
1188
+ "logps/chosen": -205.13214111328125,
1189
+ "logps/rejected": -226.38357543945312,
1190
+ "loss": 0.4779,
1191
+ "rewards/accuracies": 0.768750011920929,
1192
+ "rewards/chosen": -1.0021754503250122,
1193
+ "rewards/margins": 0.9200892448425293,
1194
+ "rewards/rejected": -1.9222646951675415,
1195
+ "step": 790
1196
+ },
1197
+ {
1198
+ "epoch": 0.5518430696270749,
1199
+ "grad_norm": 23.309860892057635,
1200
+ "learning_rate": 4.988505747126436e-07,
1201
+ "logits/chosen": 0.12849828600883484,
1202
+ "logits/rejected": 0.22922158241271973,
1203
+ "logps/chosen": -219.03140258789062,
1204
+ "logps/rejected": -238.4054412841797,
1205
+ "loss": 0.5141,
1206
+ "rewards/accuracies": 0.746874988079071,
1207
+ "rewards/chosen": -1.1349990367889404,
1208
+ "rewards/margins": 0.8661084771156311,
1209
+ "rewards/rejected": -2.0011074542999268,
1210
+ "step": 800
1211
+ },
1212
+ {
1213
+ "epoch": 0.5587411079974133,
1214
+ "grad_norm": 23.833527951692258,
1215
+ "learning_rate": 4.911877394636015e-07,
1216
+ "logits/chosen": 0.15572428703308105,
1217
+ "logits/rejected": 0.2141249179840088,
1218
+ "logps/chosen": -214.1897430419922,
1219
+ "logps/rejected": -235.65524291992188,
1220
+ "loss": 0.5401,
1221
+ "rewards/accuracies": 0.7671875357627869,
1222
+ "rewards/chosen": -1.1772220134735107,
1223
+ "rewards/margins": 0.9108952879905701,
1224
+ "rewards/rejected": -2.0881171226501465,
1225
+ "step": 810
1226
+ },
1227
+ {
1228
+ "epoch": 0.5656391463677517,
1229
+ "grad_norm": 22.362990514361123,
1230
+ "learning_rate": 4.835249042145594e-07,
1231
+ "logits/chosen": 0.13951486349105835,
1232
+ "logits/rejected": 0.19652993977069855,
1233
+ "logps/chosen": -212.92039489746094,
1234
+ "logps/rejected": -235.84652709960938,
1235
+ "loss": 0.4873,
1236
+ "rewards/accuracies": 0.78125,
1237
+ "rewards/chosen": -1.0033161640167236,
1238
+ "rewards/margins": 0.9333437085151672,
1239
+ "rewards/rejected": -1.936659812927246,
1240
+ "step": 820
1241
+ },
1242
+ {
1243
+ "epoch": 0.5725371847380901,
1244
+ "grad_norm": 22.124457707735143,
1245
+ "learning_rate": 4.7586206896551725e-07,
1246
+ "logits/chosen": 0.1511804461479187,
1247
+ "logits/rejected": 0.16028037667274475,
1248
+ "logps/chosen": -221.8773193359375,
1249
+ "logps/rejected": -240.11473083496094,
1250
+ "loss": 0.503,
1251
+ "rewards/accuracies": 0.7593750357627869,
1252
+ "rewards/chosen": -1.0800548791885376,
1253
+ "rewards/margins": 0.8837151527404785,
1254
+ "rewards/rejected": -1.9637700319290161,
1255
+ "step": 830
1256
+ },
1257
+ {
1258
+ "epoch": 0.5794352231084285,
1259
+ "grad_norm": 23.474946170740125,
1260
+ "learning_rate": 4.681992337164751e-07,
1261
+ "logits/chosen": 0.16761063039302826,
1262
+ "logits/rejected": 0.24748662114143372,
1263
+ "logps/chosen": -215.3556365966797,
1264
+ "logps/rejected": -237.9365692138672,
1265
+ "loss": 0.5211,
1266
+ "rewards/accuracies": 0.7500000596046448,
1267
+ "rewards/chosen": -1.1380001306533813,
1268
+ "rewards/margins": 0.9258146286010742,
1269
+ "rewards/rejected": -2.063814878463745,
1270
+ "step": 840
1271
+ },
1272
+ {
1273
+ "epoch": 0.586333261478767,
1274
+ "grad_norm": 22.631103668833546,
1275
+ "learning_rate": 4.6053639846743294e-07,
1276
+ "logits/chosen": 0.1405535489320755,
1277
+ "logits/rejected": 0.2345820665359497,
1278
+ "logps/chosen": -217.42453002929688,
1279
+ "logps/rejected": -235.82891845703125,
1280
+ "loss": 0.481,
1281
+ "rewards/accuracies": 0.7749999761581421,
1282
+ "rewards/chosen": -0.9171855449676514,
1283
+ "rewards/margins": 0.9807693362236023,
1284
+ "rewards/rejected": -1.8979549407958984,
1285
+ "step": 850
1286
+ },
1287
+ {
1288
+ "epoch": 0.5932312998491054,
1289
+ "grad_norm": 22.177230826478425,
1290
+ "learning_rate": 4.528735632183908e-07,
1291
+ "logits/chosen": 0.12721370160579681,
1292
+ "logits/rejected": 0.17560192942619324,
1293
+ "logps/chosen": -210.74789428710938,
1294
+ "logps/rejected": -235.66932678222656,
1295
+ "loss": 0.4794,
1296
+ "rewards/accuracies": 0.778124988079071,
1297
+ "rewards/chosen": -1.1062588691711426,
1298
+ "rewards/margins": 1.0230000019073486,
1299
+ "rewards/rejected": -2.1292591094970703,
1300
+ "step": 860
1301
+ },
1302
+ {
1303
+ "epoch": 0.6001293382194438,
1304
+ "grad_norm": 23.02504819124567,
1305
+ "learning_rate": 4.452107279693487e-07,
1306
+ "logits/chosen": 0.10594005882740021,
1307
+ "logits/rejected": 0.163488507270813,
1308
+ "logps/chosen": -225.18484497070312,
1309
+ "logps/rejected": -245.45799255371094,
1310
+ "loss": 0.4731,
1311
+ "rewards/accuracies": 0.776562511920929,
1312
+ "rewards/chosen": -1.194356083869934,
1313
+ "rewards/margins": 1.0393599271774292,
1314
+ "rewards/rejected": -2.2337160110473633,
1315
+ "step": 870
1316
+ },
1317
+ {
1318
+ "epoch": 0.6070273765897822,
1319
+ "grad_norm": 25.061402331091273,
1320
+ "learning_rate": 4.375478927203065e-07,
1321
+ "logits/chosen": 0.10525962710380554,
1322
+ "logits/rejected": 0.16414019465446472,
1323
+ "logps/chosen": -223.8541259765625,
1324
+ "logps/rejected": -242.67599487304688,
1325
+ "loss": 0.5277,
1326
+ "rewards/accuracies": 0.7562499642372131,
1327
+ "rewards/chosen": -1.145894169807434,
1328
+ "rewards/margins": 0.9744288921356201,
1329
+ "rewards/rejected": -2.1203231811523438,
1330
+ "step": 880
1331
+ },
1332
+ {
1333
+ "epoch": 0.6139254149601208,
1334
+ "grad_norm": 31.327283409677584,
1335
+ "learning_rate": 4.2988505747126437e-07,
1336
+ "logits/chosen": 0.11804062128067017,
1337
+ "logits/rejected": 0.19818758964538574,
1338
+ "logps/chosen": -217.53366088867188,
1339
+ "logps/rejected": -236.1646270751953,
1340
+ "loss": 0.4904,
1341
+ "rewards/accuracies": 0.78125,
1342
+ "rewards/chosen": -1.151870846748352,
1343
+ "rewards/margins": 0.9757134914398193,
1344
+ "rewards/rejected": -2.127584457397461,
1345
+ "step": 890
1346
+ },
1347
+ {
1348
+ "epoch": 0.6208234533304592,
1349
+ "grad_norm": 24.285377972042877,
1350
+ "learning_rate": 4.222222222222222e-07,
1351
+ "logits/chosen": 0.1879817396402359,
1352
+ "logits/rejected": 0.1984453946352005,
1353
+ "logps/chosen": -219.58839416503906,
1354
+ "logps/rejected": -239.14683532714844,
1355
+ "loss": 0.4955,
1356
+ "rewards/accuracies": 0.765625,
1357
+ "rewards/chosen": -1.072831630706787,
1358
+ "rewards/margins": 0.972345232963562,
1359
+ "rewards/rejected": -2.0451767444610596,
1360
+ "step": 900
1361
+ },
1362
+ {
1363
+ "epoch": 0.6277214917007976,
1364
+ "grad_norm": 21.37218624566544,
1365
+ "learning_rate": 4.1455938697318005e-07,
1366
+ "logits/chosen": 0.11960437893867493,
1367
+ "logits/rejected": 0.18714536726474762,
1368
+ "logps/chosen": -208.94317626953125,
1369
+ "logps/rejected": -227.9283447265625,
1370
+ "loss": 0.4906,
1371
+ "rewards/accuracies": 0.7687499523162842,
1372
+ "rewards/chosen": -1.0316712856292725,
1373
+ "rewards/margins": 0.9537010192871094,
1374
+ "rewards/rejected": -1.9853723049163818,
1375
+ "step": 910
1376
+ },
1377
+ {
1378
+ "epoch": 0.634619530071136,
1379
+ "grad_norm": 23.23303919337177,
1380
+ "learning_rate": 4.068965517241379e-07,
1381
+ "logits/chosen": 0.16769134998321533,
1382
+ "logits/rejected": 0.23668895661830902,
1383
+ "logps/chosen": -215.3607635498047,
1384
+ "logps/rejected": -242.46231079101562,
1385
+ "loss": 0.4652,
1386
+ "rewards/accuracies": 0.8046875,
1387
+ "rewards/chosen": -1.0188840627670288,
1388
+ "rewards/margins": 1.050809621810913,
1389
+ "rewards/rejected": -2.0696938037872314,
1390
+ "step": 920
1391
+ },
1392
+ {
1393
+ "epoch": 0.6415175684414745,
1394
+ "grad_norm": 22.586673634781295,
1395
+ "learning_rate": 3.992337164750958e-07,
1396
+ "logits/chosen": 0.16347406804561615,
1397
+ "logits/rejected": 0.22088101506233215,
1398
+ "logps/chosen": -219.6250457763672,
1399
+ "logps/rejected": -243.36587524414062,
1400
+ "loss": 0.4609,
1401
+ "rewards/accuracies": 0.792187511920929,
1402
+ "rewards/chosen": -1.0500195026397705,
1403
+ "rewards/margins": 1.0837587118148804,
1404
+ "rewards/rejected": -2.1337780952453613,
1405
+ "step": 930
1406
+ },
1407
+ {
1408
+ "epoch": 0.6484156068118129,
1409
+ "grad_norm": 22.98002801655956,
1410
+ "learning_rate": 3.9157088122605364e-07,
1411
+ "logits/chosen": 0.18367891013622284,
1412
+ "logits/rejected": 0.17907743155956268,
1413
+ "logps/chosen": -214.0507354736328,
1414
+ "logps/rejected": -238.12570190429688,
1415
+ "loss": 0.4956,
1416
+ "rewards/accuracies": 0.7671875357627869,
1417
+ "rewards/chosen": -1.1288217306137085,
1418
+ "rewards/margins": 0.9605937004089355,
1419
+ "rewards/rejected": -2.0894155502319336,
1420
+ "step": 940
1421
+ },
1422
+ {
1423
+ "epoch": 0.6553136451821513,
1424
+ "grad_norm": 17.84640106890724,
1425
+ "learning_rate": 3.839080459770115e-07,
1426
+ "logits/chosen": 0.18814033269882202,
1427
+ "logits/rejected": 0.26034802198410034,
1428
+ "logps/chosen": -216.52496337890625,
1429
+ "logps/rejected": -243.90826416015625,
1430
+ "loss": 0.4729,
1431
+ "rewards/accuracies": 0.776562511920929,
1432
+ "rewards/chosen": -1.0930984020233154,
1433
+ "rewards/margins": 1.0433658361434937,
1434
+ "rewards/rejected": -2.1364643573760986,
1435
+ "step": 950
1436
+ },
1437
+ {
1438
+ "epoch": 0.6622116835524897,
1439
+ "grad_norm": 22.315274563938523,
1440
+ "learning_rate": 3.762452107279693e-07,
1441
+ "logits/chosen": 0.13583947718143463,
1442
+ "logits/rejected": 0.1586841642856598,
1443
+ "logps/chosen": -210.781982421875,
1444
+ "logps/rejected": -234.8236083984375,
1445
+ "loss": 0.4797,
1446
+ "rewards/accuracies": 0.7734375,
1447
+ "rewards/chosen": -1.0251853466033936,
1448
+ "rewards/margins": 1.022051215171814,
1449
+ "rewards/rejected": -2.047236442565918,
1450
+ "step": 960
1451
+ },
1452
+ {
1453
+ "epoch": 0.6691097219228282,
1454
+ "grad_norm": 21.660020820117154,
1455
+ "learning_rate": 3.6858237547892717e-07,
1456
+ "logits/chosen": 0.13990166783332825,
1457
+ "logits/rejected": 0.19136521220207214,
1458
+ "logps/chosen": -210.8424072265625,
1459
+ "logps/rejected": -236.0836181640625,
1460
+ "loss": 0.4687,
1461
+ "rewards/accuracies": 0.7875000238418579,
1462
+ "rewards/chosen": -1.101256251335144,
1463
+ "rewards/margins": 1.099085807800293,
1464
+ "rewards/rejected": -2.2003421783447266,
1465
+ "step": 970
1466
+ },
1467
+ {
1468
+ "epoch": 0.6760077602931667,
1469
+ "grad_norm": 21.295429551155927,
1470
+ "learning_rate": 3.60919540229885e-07,
1471
+ "logits/chosen": 0.13500508666038513,
1472
+ "logits/rejected": 0.1818273663520813,
1473
+ "logps/chosen": -215.73947143554688,
1474
+ "logps/rejected": -237.76312255859375,
1475
+ "loss": 0.4689,
1476
+ "rewards/accuracies": 0.7890625596046448,
1477
+ "rewards/chosen": -1.0879331827163696,
1478
+ "rewards/margins": 1.0524659156799316,
1479
+ "rewards/rejected": -2.1403989791870117,
1480
+ "step": 980
1481
+ },
1482
+ {
1483
+ "epoch": 0.6829057986635051,
1484
+ "grad_norm": 24.350539790328526,
1485
+ "learning_rate": 3.532567049808429e-07,
1486
+ "logits/chosen": 0.18764442205429077,
1487
+ "logits/rejected": 0.14521312713623047,
1488
+ "logps/chosen": -210.7366943359375,
1489
+ "logps/rejected": -237.19691467285156,
1490
+ "loss": 0.4946,
1491
+ "rewards/accuracies": 0.776562511920929,
1492
+ "rewards/chosen": -1.1615097522735596,
1493
+ "rewards/margins": 1.009273886680603,
1494
+ "rewards/rejected": -2.170783758163452,
1495
+ "step": 990
1496
+ },
1497
+ {
1498
+ "epoch": 0.6898038370338435,
1499
+ "grad_norm": 24.430527768004612,
1500
+ "learning_rate": 3.4559386973180075e-07,
1501
+ "logits/chosen": 0.1154608502984047,
1502
+ "logits/rejected": 0.14394116401672363,
1503
+ "logps/chosen": -211.67010498046875,
1504
+ "logps/rejected": -235.22601318359375,
1505
+ "loss": 0.4856,
1506
+ "rewards/accuracies": 0.7718750238418579,
1507
+ "rewards/chosen": -1.1421856880187988,
1508
+ "rewards/margins": 1.003755807876587,
1509
+ "rewards/rejected": -2.1459412574768066,
1510
+ "step": 1000
1511
+ },
1512
+ {
1513
+ "epoch": 0.6967018754041819,
1514
+ "grad_norm": 25.333257716089456,
1515
+ "learning_rate": 3.379310344827586e-07,
1516
+ "logits/chosen": 0.05758751928806305,
1517
+ "logits/rejected": 0.23519480228424072,
1518
+ "logps/chosen": -220.646484375,
1519
+ "logps/rejected": -239.65780639648438,
1520
+ "loss": 0.4788,
1521
+ "rewards/accuracies": 0.785937488079071,
1522
+ "rewards/chosen": -1.2021660804748535,
1523
+ "rewards/margins": 1.0922119617462158,
1524
+ "rewards/rejected": -2.2943780422210693,
1525
+ "step": 1010
1526
+ },
1527
+ {
1528
+ "epoch": 0.7035999137745204,
1529
+ "grad_norm": 23.831188739778135,
1530
+ "learning_rate": 3.3026819923371644e-07,
1531
+ "logits/chosen": 0.1304895281791687,
1532
+ "logits/rejected": 0.19071732461452484,
1533
+ "logps/chosen": -210.5736083984375,
1534
+ "logps/rejected": -232.50901794433594,
1535
+ "loss": 0.4723,
1536
+ "rewards/accuracies": 0.7656250596046448,
1537
+ "rewards/chosen": -1.054470181465149,
1538
+ "rewards/margins": 1.0092376470565796,
1539
+ "rewards/rejected": -2.0637075901031494,
1540
+ "step": 1020
1541
+ },
1542
+ {
1543
+ "epoch": 0.7104979521448588,
1544
+ "grad_norm": 21.71715110582083,
1545
+ "learning_rate": 3.226053639846743e-07,
1546
+ "logits/chosen": 0.0674755871295929,
1547
+ "logits/rejected": 0.1555739939212799,
1548
+ "logps/chosen": -208.80389404296875,
1549
+ "logps/rejected": -228.76556396484375,
1550
+ "loss": 0.4899,
1551
+ "rewards/accuracies": 0.784375011920929,
1552
+ "rewards/chosen": -1.0720574855804443,
1553
+ "rewards/margins": 0.988200306892395,
1554
+ "rewards/rejected": -2.060257911682129,
1555
+ "step": 1030
1556
+ },
1557
+ {
1558
+ "epoch": 0.7173959905151972,
1559
+ "grad_norm": 19.885607800102576,
1560
+ "learning_rate": 3.149425287356321e-07,
1561
+ "logits/chosen": 0.11615429818630219,
1562
+ "logits/rejected": 0.13085736334323883,
1563
+ "logps/chosen": -223.4676513671875,
1564
+ "logps/rejected": -245.81130981445312,
1565
+ "loss": 0.4572,
1566
+ "rewards/accuracies": 0.7859375476837158,
1567
+ "rewards/chosen": -1.104932188987732,
1568
+ "rewards/margins": 1.1073505878448486,
1569
+ "rewards/rejected": -2.212282657623291,
1570
+ "step": 1040
1571
+ },
1572
+ {
1573
+ "epoch": 0.7242940288855356,
1574
+ "grad_norm": 22.633180774780207,
1575
+ "learning_rate": 3.0727969348659e-07,
1576
+ "logits/chosen": 0.11931788921356201,
1577
+ "logits/rejected": 0.1989806443452835,
1578
+ "logps/chosen": -223.46701049804688,
1579
+ "logps/rejected": -245.942626953125,
1580
+ "loss": 0.476,
1581
+ "rewards/accuracies": 0.7953125238418579,
1582
+ "rewards/chosen": -1.1209880113601685,
1583
+ "rewards/margins": 1.1126110553741455,
1584
+ "rewards/rejected": -2.2335991859436035,
1585
+ "step": 1050
1586
+ },
1587
+ {
1588
+ "epoch": 0.7311920672558742,
1589
+ "grad_norm": 22.607339620969345,
1590
+ "learning_rate": 2.996168582375479e-07,
1591
+ "logits/chosen": 0.15352767705917358,
1592
+ "logits/rejected": 0.22844256460666656,
1593
+ "logps/chosen": -208.1845703125,
1594
+ "logps/rejected": -230.12515258789062,
1595
+ "loss": 0.4797,
1596
+ "rewards/accuracies": 0.7859375476837158,
1597
+ "rewards/chosen": -1.1647709608078003,
1598
+ "rewards/margins": 1.111741542816162,
1599
+ "rewards/rejected": -2.276512622833252,
1600
+ "step": 1060
1601
+ },
1602
+ {
1603
+ "epoch": 0.7380901056262126,
1604
+ "grad_norm": 22.10853554321452,
1605
+ "learning_rate": 2.9195402298850576e-07,
1606
+ "logits/chosen": 0.14773155748844147,
1607
+ "logits/rejected": 0.1732293963432312,
1608
+ "logps/chosen": -209.5767059326172,
1609
+ "logps/rejected": -239.27427673339844,
1610
+ "loss": 0.4584,
1611
+ "rewards/accuracies": 0.8046875,
1612
+ "rewards/chosen": -1.1018071174621582,
1613
+ "rewards/margins": 1.0996737480163574,
1614
+ "rewards/rejected": -2.2014808654785156,
1615
+ "step": 1070
1616
+ },
1617
+ {
1618
+ "epoch": 0.744988143996551,
1619
+ "grad_norm": 24.492427020207415,
1620
+ "learning_rate": 2.842911877394636e-07,
1621
+ "logits/chosen": 0.1176578477025032,
1622
+ "logits/rejected": 0.18459954857826233,
1623
+ "logps/chosen": -208.61622619628906,
1624
+ "logps/rejected": -232.27537536621094,
1625
+ "loss": 0.4752,
1626
+ "rewards/accuracies": 0.7843750715255737,
1627
+ "rewards/chosen": -1.079516053199768,
1628
+ "rewards/margins": 1.0623971223831177,
1629
+ "rewards/rejected": -2.141913414001465,
1630
+ "step": 1080
1631
+ },
1632
+ {
1633
+ "epoch": 0.7518861823668894,
1634
+ "grad_norm": 40.036510583262064,
1635
+ "learning_rate": 2.7662835249042145e-07,
1636
+ "logits/chosen": 0.15787634253501892,
1637
+ "logits/rejected": 0.1683928668498993,
1638
+ "logps/chosen": -218.29855346679688,
1639
+ "logps/rejected": -239.5789794921875,
1640
+ "loss": 0.4815,
1641
+ "rewards/accuracies": 0.768750011920929,
1642
+ "rewards/chosen": -1.1719642877578735,
1643
+ "rewards/margins": 1.0352319478988647,
1644
+ "rewards/rejected": -2.2071962356567383,
1645
+ "step": 1090
1646
+ },
1647
+ {
1648
+ "epoch": 0.7587842207372278,
1649
+ "grad_norm": 21.374240830481533,
1650
+ "learning_rate": 2.689655172413793e-07,
1651
+ "logits/chosen": 0.17153756320476532,
1652
+ "logits/rejected": 0.17747098207473755,
1653
+ "logps/chosen": -210.64163208007812,
1654
+ "logps/rejected": -234.2825927734375,
1655
+ "loss": 0.488,
1656
+ "rewards/accuracies": 0.7750000357627869,
1657
+ "rewards/chosen": -1.1999056339263916,
1658
+ "rewards/margins": 0.9716651439666748,
1659
+ "rewards/rejected": -2.1715710163116455,
1660
+ "step": 1100
1661
+ },
1662
+ {
1663
+ "epoch": 0.7656822591075663,
1664
+ "grad_norm": 23.23316889911473,
1665
+ "learning_rate": 2.6130268199233714e-07,
1666
+ "logits/chosen": 0.15322671830654144,
1667
+ "logits/rejected": 0.18879210948944092,
1668
+ "logps/chosen": -214.78797912597656,
1669
+ "logps/rejected": -239.0963897705078,
1670
+ "loss": 0.4684,
1671
+ "rewards/accuracies": 0.8015625476837158,
1672
+ "rewards/chosen": -1.113561749458313,
1673
+ "rewards/margins": 1.0662989616394043,
1674
+ "rewards/rejected": -2.1798605918884277,
1675
+ "step": 1110
1676
+ },
1677
+ {
1678
+ "epoch": 0.7725802974779047,
1679
+ "grad_norm": 21.606963571039277,
1680
+ "learning_rate": 2.5363984674329503e-07,
1681
+ "logits/chosen": 0.18151381611824036,
1682
+ "logits/rejected": 0.19774743914604187,
1683
+ "logps/chosen": -213.07774353027344,
1684
+ "logps/rejected": -236.7171173095703,
1685
+ "loss": 0.4579,
1686
+ "rewards/accuracies": 0.784375011920929,
1687
+ "rewards/chosen": -1.1090879440307617,
1688
+ "rewards/margins": 1.1060962677001953,
1689
+ "rewards/rejected": -2.215184450149536,
1690
+ "step": 1120
1691
+ },
1692
+ {
1693
+ "epoch": 0.7794783358482431,
1694
+ "grad_norm": 31.597389723510716,
1695
+ "learning_rate": 2.459770114942529e-07,
1696
+ "logits/chosen": 0.12418875098228455,
1697
+ "logits/rejected": 0.25851666927337646,
1698
+ "logps/chosen": -207.92999267578125,
1699
+ "logps/rejected": -228.67929077148438,
1700
+ "loss": 0.4743,
1701
+ "rewards/accuracies": 0.768750011920929,
1702
+ "rewards/chosen": -1.1108900308609009,
1703
+ "rewards/margins": 1.0005470514297485,
1704
+ "rewards/rejected": -2.1114370822906494,
1705
+ "step": 1130
1706
+ },
1707
+ {
1708
+ "epoch": 0.7863763742185816,
1709
+ "grad_norm": 22.767345565284042,
1710
+ "learning_rate": 2.3831417624521072e-07,
1711
+ "logits/chosen": 0.14228317141532898,
1712
+ "logits/rejected": 0.21690289676189423,
1713
+ "logps/chosen": -224.71209716796875,
1714
+ "logps/rejected": -243.876953125,
1715
+ "loss": 0.4685,
1716
+ "rewards/accuracies": 0.7750000357627869,
1717
+ "rewards/chosen": -1.2229084968566895,
1718
+ "rewards/margins": 1.051608920097351,
1719
+ "rewards/rejected": -2.27451753616333,
1720
+ "step": 1140
1721
+ },
1722
+ {
1723
+ "epoch": 0.7932744125889201,
1724
+ "grad_norm": 27.34667176932844,
1725
+ "learning_rate": 2.3065134099616856e-07,
1726
+ "logits/chosen": 0.07832491397857666,
1727
+ "logits/rejected": 0.1705821305513382,
1728
+ "logps/chosen": -221.5072784423828,
1729
+ "logps/rejected": -246.1153564453125,
1730
+ "loss": 0.4579,
1731
+ "rewards/accuracies": 0.7984375357627869,
1732
+ "rewards/chosen": -1.1345467567443848,
1733
+ "rewards/margins": 1.146794319152832,
1734
+ "rewards/rejected": -2.281341075897217,
1735
+ "step": 1150
1736
+ },
1737
+ {
1738
+ "epoch": 0.8001724509592585,
1739
+ "grad_norm": 23.770336500523456,
1740
+ "learning_rate": 2.2298850574712643e-07,
1741
+ "logits/chosen": 0.14851118624210358,
1742
+ "logits/rejected": 0.18459179997444153,
1743
+ "logps/chosen": -216.7705535888672,
1744
+ "logps/rejected": -236.89364624023438,
1745
+ "loss": 0.4805,
1746
+ "rewards/accuracies": 0.7578125,
1747
+ "rewards/chosen": -1.1201305389404297,
1748
+ "rewards/margins": 1.0015616416931152,
1749
+ "rewards/rejected": -2.121692180633545,
1750
+ "step": 1160
1751
+ },
1752
+ {
1753
+ "epoch": 0.8070704893295969,
1754
+ "grad_norm": 30.260951075153393,
1755
+ "learning_rate": 2.1532567049808428e-07,
1756
+ "logits/chosen": 0.2019878774881363,
1757
+ "logits/rejected": 0.1767769604921341,
1758
+ "logps/chosen": -209.51361083984375,
1759
+ "logps/rejected": -239.5547332763672,
1760
+ "loss": 0.4705,
1761
+ "rewards/accuracies": 0.778124988079071,
1762
+ "rewards/chosen": -1.2167738676071167,
1763
+ "rewards/margins": 1.0992836952209473,
1764
+ "rewards/rejected": -2.3160576820373535,
1765
+ "step": 1170
1766
+ },
1767
+ {
1768
+ "epoch": 0.8139685276999353,
1769
+ "grad_norm": 27.377397146898016,
1770
+ "learning_rate": 2.0766283524904212e-07,
1771
+ "logits/chosen": 0.1637146770954132,
1772
+ "logits/rejected": 0.18806767463684082,
1773
+ "logps/chosen": -216.56570434570312,
1774
+ "logps/rejected": -240.6849822998047,
1775
+ "loss": 0.4357,
1776
+ "rewards/accuracies": 0.7984375357627869,
1777
+ "rewards/chosen": -1.123631238937378,
1778
+ "rewards/margins": 1.1824723482131958,
1779
+ "rewards/rejected": -2.3061037063598633,
1780
+ "step": 1180
1781
+ },
1782
+ {
1783
+ "epoch": 0.8208665660702738,
1784
+ "grad_norm": 25.354240699491303,
1785
+ "learning_rate": 2e-07,
1786
+ "logits/chosen": 0.14868010580539703,
1787
+ "logits/rejected": 0.14347989857196808,
1788
+ "logps/chosen": -225.78515625,
1789
+ "logps/rejected": -256.4184875488281,
1790
+ "loss": 0.4544,
1791
+ "rewards/accuracies": 0.8109375238418579,
1792
+ "rewards/chosen": -1.2112679481506348,
1793
+ "rewards/margins": 1.1206707954406738,
1794
+ "rewards/rejected": -2.3319387435913086,
1795
+ "step": 1190
1796
+ },
1797
+ {
1798
+ "epoch": 0.8277646044406122,
1799
+ "grad_norm": 20.190249426572805,
1800
+ "learning_rate": 1.9233716475095783e-07,
1801
+ "logits/chosen": 0.17432813346385956,
1802
+ "logits/rejected": 0.19997775554656982,
1803
+ "logps/chosen": -214.68603515625,
1804
+ "logps/rejected": -242.19955444335938,
1805
+ "loss": 0.4769,
1806
+ "rewards/accuracies": 0.7718750238418579,
1807
+ "rewards/chosen": -1.2452560663223267,
1808
+ "rewards/margins": 1.1149595975875854,
1809
+ "rewards/rejected": -2.360215663909912,
1810
+ "step": 1200
1811
+ },
1812
+ {
1813
+ "epoch": 0.8346626428109506,
1814
+ "grad_norm": 22.228370921152507,
1815
+ "learning_rate": 1.846743295019157e-07,
1816
+ "logits/chosen": 0.12424763292074203,
1817
+ "logits/rejected": 0.19307748973369598,
1818
+ "logps/chosen": -219.47999572753906,
1819
+ "logps/rejected": -244.67124938964844,
1820
+ "loss": 0.4646,
1821
+ "rewards/accuracies": 0.7437500357627869,
1822
+ "rewards/chosen": -1.2520346641540527,
1823
+ "rewards/margins": 1.1548492908477783,
1824
+ "rewards/rejected": -2.406883955001831,
1825
+ "step": 1210
1826
+ },
1827
+ {
1828
+ "epoch": 0.841560681181289,
1829
+ "grad_norm": 25.41931454357561,
1830
+ "learning_rate": 1.7701149425287357e-07,
1831
+ "logits/chosen": 0.12825195491313934,
1832
+ "logits/rejected": 0.17117546498775482,
1833
+ "logps/chosen": -213.5985870361328,
1834
+ "logps/rejected": -235.36941528320312,
1835
+ "loss": 0.4633,
1836
+ "rewards/accuracies": 0.7984375357627869,
1837
+ "rewards/chosen": -1.148476243019104,
1838
+ "rewards/margins": 1.119852066040039,
1839
+ "rewards/rejected": -2.2683281898498535,
1840
+ "step": 1220
1841
+ },
1842
+ {
1843
+ "epoch": 0.8484587195516275,
1844
+ "grad_norm": 30.167829998794836,
1845
+ "learning_rate": 1.6934865900383142e-07,
1846
+ "logits/chosen": 0.19128039479255676,
1847
+ "logits/rejected": 0.2100141942501068,
1848
+ "logps/chosen": -222.0583953857422,
1849
+ "logps/rejected": -245.87979125976562,
1850
+ "loss": 0.4738,
1851
+ "rewards/accuracies": 0.7687499523162842,
1852
+ "rewards/chosen": -1.3405332565307617,
1853
+ "rewards/margins": 1.0950267314910889,
1854
+ "rewards/rejected": -2.4355597496032715,
1855
+ "step": 1230
1856
+ },
1857
+ {
1858
+ "epoch": 0.855356757921966,
1859
+ "grad_norm": 26.728879899623912,
1860
+ "learning_rate": 1.6168582375478926e-07,
1861
+ "logits/chosen": 0.12200842052698135,
1862
+ "logits/rejected": 0.17066144943237305,
1863
+ "logps/chosen": -214.36114501953125,
1864
+ "logps/rejected": -235.70079040527344,
1865
+ "loss": 0.4701,
1866
+ "rewards/accuracies": 0.7750000357627869,
1867
+ "rewards/chosen": -1.2356151342391968,
1868
+ "rewards/margins": 1.0795714855194092,
1869
+ "rewards/rejected": -2.3151865005493164,
1870
+ "step": 1240
1871
+ },
1872
+ {
1873
+ "epoch": 0.8622547962923044,
1874
+ "grad_norm": 25.162229685459597,
1875
+ "learning_rate": 1.5402298850574713e-07,
1876
+ "logits/chosen": 0.13873139023780823,
1877
+ "logits/rejected": 0.19574517011642456,
1878
+ "logps/chosen": -217.75022888183594,
1879
+ "logps/rejected": -240.75650024414062,
1880
+ "loss": 0.4404,
1881
+ "rewards/accuracies": 0.7859375476837158,
1882
+ "rewards/chosen": -1.2010749578475952,
1883
+ "rewards/margins": 1.2063066959381104,
1884
+ "rewards/rejected": -2.407381534576416,
1885
+ "step": 1250
1886
+ },
1887
+ {
1888
+ "epoch": 0.8691528346626428,
1889
+ "grad_norm": 23.666288426380707,
1890
+ "learning_rate": 1.4636015325670498e-07,
1891
+ "logits/chosen": 0.11364492774009705,
1892
+ "logits/rejected": 0.1443002074956894,
1893
+ "logps/chosen": -219.42230224609375,
1894
+ "logps/rejected": -244.23477172851562,
1895
+ "loss": 0.4612,
1896
+ "rewards/accuracies": 0.768750011920929,
1897
+ "rewards/chosen": -1.271179437637329,
1898
+ "rewards/margins": 1.1596243381500244,
1899
+ "rewards/rejected": -2.4308037757873535,
1900
+ "step": 1260
1901
+ },
1902
+ {
1903
+ "epoch": 0.8760508730329812,
1904
+ "grad_norm": 21.869263002803784,
1905
+ "learning_rate": 1.3869731800766282e-07,
1906
+ "logits/chosen": 0.14046192169189453,
1907
+ "logits/rejected": 0.17701970040798187,
1908
+ "logps/chosen": -217.38250732421875,
1909
+ "logps/rejected": -240.967529296875,
1910
+ "loss": 0.4529,
1911
+ "rewards/accuracies": 0.796875,
1912
+ "rewards/chosen": -1.199389934539795,
1913
+ "rewards/margins": 1.158388376235962,
1914
+ "rewards/rejected": -2.357778549194336,
1915
+ "step": 1270
1916
+ },
1917
+ {
1918
+ "epoch": 0.8829489114033197,
1919
+ "grad_norm": 23.643447697873064,
1920
+ "learning_rate": 1.310344827586207e-07,
1921
+ "logits/chosen": 0.12723781168460846,
1922
+ "logits/rejected": 0.19653302431106567,
1923
+ "logps/chosen": -215.25897216796875,
1924
+ "logps/rejected": -236.8069305419922,
1925
+ "loss": 0.4947,
1926
+ "rewards/accuracies": 0.7828125357627869,
1927
+ "rewards/chosen": -1.253035306930542,
1928
+ "rewards/margins": 1.1224628686904907,
1929
+ "rewards/rejected": -2.3754982948303223,
1930
+ "step": 1280
1931
+ },
1932
+ {
1933
+ "epoch": 0.8898469497736581,
1934
+ "grad_norm": 25.274424651570303,
1935
+ "learning_rate": 1.2337164750957853e-07,
1936
+ "logits/chosen": 0.18923957645893097,
1937
+ "logits/rejected": 0.18438169360160828,
1938
+ "logps/chosen": -214.2564239501953,
1939
+ "logps/rejected": -239.39871215820312,
1940
+ "loss": 0.4644,
1941
+ "rewards/accuracies": 0.796875,
1942
+ "rewards/chosen": -1.2227095365524292,
1943
+ "rewards/margins": 1.1552302837371826,
1944
+ "rewards/rejected": -2.3779397010803223,
1945
+ "step": 1290
1946
+ },
1947
+ {
1948
+ "epoch": 0.8967449881439965,
1949
+ "grad_norm": 22.069123537359786,
1950
+ "learning_rate": 1.1570881226053639e-07,
1951
+ "logits/chosen": 0.08069028705358505,
1952
+ "logits/rejected": 0.2447403222322464,
1953
+ "logps/chosen": -219.84442138671875,
1954
+ "logps/rejected": -241.29461669921875,
1955
+ "loss": 0.4417,
1956
+ "rewards/accuracies": 0.792187511920929,
1957
+ "rewards/chosen": -1.110395908355713,
1958
+ "rewards/margins": 1.2345901727676392,
1959
+ "rewards/rejected": -2.3449859619140625,
1960
+ "step": 1300
1961
+ },
1962
+ {
1963
+ "epoch": 0.903643026514335,
1964
+ "grad_norm": 21.99285086423841,
1965
+ "learning_rate": 1.0804597701149425e-07,
1966
+ "logits/chosen": 0.12903529405593872,
1967
+ "logits/rejected": 0.15087783336639404,
1968
+ "logps/chosen": -215.73480224609375,
1969
+ "logps/rejected": -239.2603759765625,
1970
+ "loss": 0.4529,
1971
+ "rewards/accuracies": 0.7718750238418579,
1972
+ "rewards/chosen": -1.2006369829177856,
1973
+ "rewards/margins": 1.1153340339660645,
1974
+ "rewards/rejected": -2.3159708976745605,
1975
+ "step": 1310
1976
+ },
1977
+ {
1978
+ "epoch": 0.9105410648846735,
1979
+ "grad_norm": 20.312748964979473,
1980
+ "learning_rate": 1.003831417624521e-07,
1981
+ "logits/chosen": 0.11999164521694183,
1982
+ "logits/rejected": 0.1974225789308548,
1983
+ "logps/chosen": -216.77389526367188,
1984
+ "logps/rejected": -237.3870391845703,
1985
+ "loss": 0.4465,
1986
+ "rewards/accuracies": 0.7906249761581421,
1987
+ "rewards/chosen": -1.1557135581970215,
1988
+ "rewards/margins": 1.1768665313720703,
1989
+ "rewards/rejected": -2.332580089569092,
1990
+ "step": 1320
1991
+ },
1992
+ {
1993
+ "epoch": 0.9174391032550119,
1994
+ "grad_norm": 25.30850934446684,
1995
+ "learning_rate": 9.272030651340995e-08,
1996
+ "logits/chosen": 0.14219430088996887,
1997
+ "logits/rejected": 0.18175333738327026,
1998
+ "logps/chosen": -215.50270080566406,
1999
+ "logps/rejected": -235.79623413085938,
2000
+ "loss": 0.5007,
2001
+ "rewards/accuracies": 0.7593750357627869,
2002
+ "rewards/chosen": -1.278659701347351,
2003
+ "rewards/margins": 0.9686375260353088,
2004
+ "rewards/rejected": -2.2472972869873047,
2005
+ "step": 1330
2006
+ },
2007
+ {
2008
+ "epoch": 0.9243371416253503,
2009
+ "grad_norm": 23.186087599735348,
2010
+ "learning_rate": 8.505747126436782e-08,
2011
+ "logits/chosen": 0.11075228452682495,
2012
+ "logits/rejected": 0.20603428781032562,
2013
+ "logps/chosen": -210.88516235351562,
2014
+ "logps/rejected": -240.35678100585938,
2015
+ "loss": 0.4432,
2016
+ "rewards/accuracies": 0.7828124761581421,
2017
+ "rewards/chosen": -1.1982022523880005,
2018
+ "rewards/margins": 1.176865577697754,
2019
+ "rewards/rejected": -2.375067949295044,
2020
+ "step": 1340
2021
+ },
2022
+ {
2023
+ "epoch": 0.9312351799956887,
2024
+ "grad_norm": 26.236430191442004,
2025
+ "learning_rate": 7.739463601532567e-08,
2026
+ "logits/chosen": 0.14156416058540344,
2027
+ "logits/rejected": 0.2178121656179428,
2028
+ "logps/chosen": -214.24050903320312,
2029
+ "logps/rejected": -235.45953369140625,
2030
+ "loss": 0.496,
2031
+ "rewards/accuracies": 0.78125,
2032
+ "rewards/chosen": -1.298641562461853,
2033
+ "rewards/margins": 1.0931938886642456,
2034
+ "rewards/rejected": -2.3918354511260986,
2035
+ "step": 1350
2036
+ },
2037
+ {
2038
+ "epoch": 0.9381332183660271,
2039
+ "grad_norm": 26.761777481619593,
2040
+ "learning_rate": 6.973180076628352e-08,
2041
+ "logits/chosen": 0.09703594446182251,
2042
+ "logits/rejected": 0.19476377964019775,
2043
+ "logps/chosen": -221.1289520263672,
2044
+ "logps/rejected": -242.93923950195312,
2045
+ "loss": 0.4914,
2046
+ "rewards/accuracies": 0.7593749761581421,
2047
+ "rewards/chosen": -1.2596949338912964,
2048
+ "rewards/margins": 1.0118179321289062,
2049
+ "rewards/rejected": -2.271512985229492,
2050
+ "step": 1360
2051
+ },
2052
+ {
2053
+ "epoch": 0.9450312567363656,
2054
+ "grad_norm": 23.607901076870643,
2055
+ "learning_rate": 6.206896551724137e-08,
2056
+ "logits/chosen": 0.05706113576889038,
2057
+ "logits/rejected": 0.20017537474632263,
2058
+ "logps/chosen": -214.57180786132812,
2059
+ "logps/rejected": -240.35067749023438,
2060
+ "loss": 0.4409,
2061
+ "rewards/accuracies": 0.815625011920929,
2062
+ "rewards/chosen": -1.1195130348205566,
2063
+ "rewards/margins": 1.2302576303482056,
2064
+ "rewards/rejected": -2.3497705459594727,
2065
+ "step": 1370
2066
+ },
2067
+ {
2068
+ "epoch": 0.951929295106704,
2069
+ "grad_norm": 20.38307949461832,
2070
+ "learning_rate": 5.440613026819923e-08,
2071
+ "logits/chosen": 0.09781169891357422,
2072
+ "logits/rejected": 0.11969345808029175,
2073
+ "logps/chosen": -221.07252502441406,
2074
+ "logps/rejected": -245.23606872558594,
2075
+ "loss": 0.4695,
2076
+ "rewards/accuracies": 0.7796875238418579,
2077
+ "rewards/chosen": -1.2874337434768677,
2078
+ "rewards/margins": 1.1202243566513062,
2079
+ "rewards/rejected": -2.407658100128174,
2080
+ "step": 1380
2081
+ },
2082
+ {
2083
+ "epoch": 0.9588273334770425,
2084
+ "grad_norm": 25.47882209795097,
2085
+ "learning_rate": 4.674329501915709e-08,
2086
+ "logits/chosen": 0.08676274120807648,
2087
+ "logits/rejected": 0.19397087395191193,
2088
+ "logps/chosen": -214.9215087890625,
2089
+ "logps/rejected": -236.62606811523438,
2090
+ "loss": 0.4592,
2091
+ "rewards/accuracies": 0.7999999523162842,
2092
+ "rewards/chosen": -1.2205379009246826,
2093
+ "rewards/margins": 1.0657426118850708,
2094
+ "rewards/rejected": -2.286280632019043,
2095
+ "step": 1390
2096
+ },
2097
+ {
2098
+ "epoch": 0.9657253718473809,
2099
+ "grad_norm": 22.952653079541584,
2100
+ "learning_rate": 3.908045977011494e-08,
2101
+ "logits/chosen": 0.1025160402059555,
2102
+ "logits/rejected": 0.142715722322464,
2103
+ "logps/chosen": -218.51333618164062,
2104
+ "logps/rejected": -243.1075439453125,
2105
+ "loss": 0.4819,
2106
+ "rewards/accuracies": 0.7718749642372131,
2107
+ "rewards/chosen": -1.2564886808395386,
2108
+ "rewards/margins": 1.0429816246032715,
2109
+ "rewards/rejected": -2.2994704246520996,
2110
+ "step": 1400
2111
+ },
2112
+ {
2113
+ "epoch": 0.9726234102177194,
2114
+ "grad_norm": 20.867881946381864,
2115
+ "learning_rate": 3.1417624521072795e-08,
2116
+ "logits/chosen": 0.05843396484851837,
2117
+ "logits/rejected": 0.12826119363307953,
2118
+ "logps/chosen": -217.50735473632812,
2119
+ "logps/rejected": -244.44912719726562,
2120
+ "loss": 0.4469,
2121
+ "rewards/accuracies": 0.8203125,
2122
+ "rewards/chosen": -1.1856391429901123,
2123
+ "rewards/margins": 1.1579524278640747,
2124
+ "rewards/rejected": -2.3435914516448975,
2125
+ "step": 1410
2126
+ },
2127
+ {
2128
+ "epoch": 0.9795214485880578,
2129
+ "grad_norm": 23.82920991184487,
2130
+ "learning_rate": 2.375478927203065e-08,
2131
+ "logits/chosen": 0.1010110154747963,
2132
+ "logits/rejected": 0.09921743720769882,
2133
+ "logps/chosen": -219.37962341308594,
2134
+ "logps/rejected": -246.5306396484375,
2135
+ "loss": 0.4339,
2136
+ "rewards/accuracies": 0.7921875715255737,
2137
+ "rewards/chosen": -1.140840768814087,
2138
+ "rewards/margins": 1.2549952268600464,
2139
+ "rewards/rejected": -2.395836353302002,
2140
+ "step": 1420
2141
+ },
2142
+ {
2143
+ "epoch": 0.9864194869583962,
2144
+ "grad_norm": 24.317865057607765,
2145
+ "learning_rate": 1.6091954022988505e-08,
2146
+ "logits/chosen": 0.11954033374786377,
2147
+ "logits/rejected": 0.16331204771995544,
2148
+ "logps/chosen": -205.85574340820312,
2149
+ "logps/rejected": -231.82656860351562,
2150
+ "loss": 0.4703,
2151
+ "rewards/accuracies": 0.7953125238418579,
2152
+ "rewards/chosen": -1.1854947805404663,
2153
+ "rewards/margins": 1.10270094871521,
2154
+ "rewards/rejected": -2.2881956100463867,
2155
+ "step": 1430
2156
+ },
2157
+ {
2158
+ "epoch": 0.9933175253287346,
2159
+ "grad_norm": 31.018823587254435,
2160
+ "learning_rate": 8.429118773946359e-09,
2161
+ "logits/chosen": 0.10009612143039703,
2162
+ "logits/rejected": 0.17402292788028717,
2163
+ "logps/chosen": -214.16224670410156,
2164
+ "logps/rejected": -239.9188232421875,
2165
+ "loss": 0.4753,
2166
+ "rewards/accuracies": 0.7593750357627869,
2167
+ "rewards/chosen": -1.2428853511810303,
2168
+ "rewards/margins": 1.140334129333496,
2169
+ "rewards/rejected": -2.3832194805145264,
2170
+ "step": 1440
2171
+ },
2172
+ {
2173
+ "epoch": 1.0,
2174
+ "grad_norm": 23.50715246309972,
2175
+ "learning_rate": 7.662835249042145e-10,
2176
+ "logits/chosen": 0.0691741406917572,
2177
+ "logits/rejected": 0.1229172945022583,
2178
+ "logps/chosen": -216.78927612304688,
2179
+ "logps/rejected": -239.19741821289062,
2180
+ "loss": 0.4402,
2181
+ "rewards/accuracies": 0.7774193286895752,
2182
+ "rewards/chosen": -1.1669424772262573,
2183
+ "rewards/margins": 1.1343679428100586,
2184
+ "rewards/rejected": -2.3013103008270264,
2185
+ "step": 1450
2186
+ },
2187
+ {
2188
+ "epoch": 1.0,
2189
+ "step": 1450,
2190
+ "total_flos": 160003043033088.0,
2191
+ "train_loss": 0.538088473287122,
2192
+ "train_runtime": 14420.7196,
2193
+ "train_samples_per_second": 6.434,
2194
+ "train_steps_per_second": 0.101
2195
+ }
2196
+ ],
2197
+ "logging_steps": 10,
2198
+ "max_steps": 1450,
2199
+ "num_input_tokens_seen": 0,
2200
+ "num_train_epochs": 1,
2201
+ "save_steps": 250,
2202
+ "stateful_callbacks": {
2203
+ "TrainerControl": {
2204
+ "args": {
2205
+ "should_epoch_stop": false,
2206
+ "should_evaluate": false,
2207
+ "should_log": false,
2208
+ "should_save": true,
2209
+ "should_training_stop": true
2210
+ },
2211
+ "attributes": {}
2212
+ }
2213
+ },
2214
+ "total_flos": 160003043033088.0,
2215
+ "train_batch_size": 1,
2216
+ "trial_name": null,
2217
+ "trial_params": null
2218
+ }
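The records above follow the standard Hugging Face `Trainer` state schema (typically committed as `trainer_state.json`), with DPO metrics in the TRL naming convention: `rewards/margins` is the mean chosen-minus-rejected implicit reward and `rewards/accuracies` is the fraction of pairs where the chosen response scores higher. A minimal sketch for sanity-checking that relation locally; the filename and tolerance are assumptions, not part of the commit:

```python
import json

# Minimal sketch: verify the DPO reward bookkeeping in the log above.
# Assumes the JSON is saved locally under the usual name trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "rewards/margins" not in entry:
        continue  # the final record is a run summary without reward fields
    # margins should equal chosen minus rejected, up to float32 logging noise
    margin = entry["rewards/chosen"] - entry["rewards/rejected"]
    assert abs(margin - entry["rewards/margins"]) < 1e-4, entry["step"]

last = [e for e in state["log_history"] if "loss" in e][-1]
print(f"step {last['step']}: loss={last['loss']:.4f}, "
      f"reward accuracy={last['rewards/accuracies']:.3f}")
```

For example, at step 750 the logged values give −0.8816 − (−1.8230) ≈ 0.9414, matching that entry's `rewards/margins`.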
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:40241b1160d2ec9ecf5d9f38878b1cd32b946c2a2d458c4571941761fbc2ff78
3
+ size 7544
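The three lines above are a Git LFS pointer, not the file itself; the actual `training_args.bin` (7,544 bytes) is the pickled `TrainingArguments` object that `Trainer` writes with `torch.save`. A hedged sketch for retrieving and inspecting it — the repo id is an assumption inferred from the model name in the README, and unpickling may require the training framework (here LLaMA-Factory) to be importable if a subclass was saved:

```python
import torch
from huggingface_hub import hf_hub_download

# Assumed repo id, inferred from the model name in the README.
REPO_ID = "davidanugraha/helpsteer3_llama32_3b_dpo_rmr1_32b"

# hf_hub_download resolves the LFS pointer and downloads the real binary.
path = hf_hub_download(repo_id=REPO_ID, filename="training_args.bin")

# Trainer saves this file with torch.save; PyTorch >= 2.6 defaults to
# weights_only=True, so unpickling a full object needs an explicit opt-in.
args = torch.load(path, weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size)
```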
training_loss.png ADDED
training_rewards_accuracies.png ADDED
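The two added PNGs are presumably the loss and reward-accuracy curves rendered from the log history above; their exact styling is not recoverable from the diff. A rough sketch that rebuilds comparable plots from `trainer_state.json`:

```python
import json
import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    history = json.load(f)["log_history"]

# keep only the per-step records (the run summary has no "loss" key)
logs = [e for e in history if "loss" in e]
steps = [e["step"] for e in logs]

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(steps, [e["loss"] for e in logs])
ax1.set(xlabel="step", ylabel="DPO loss", title="training loss")
ax2.plot(steps, [e["rewards/accuracies"] for e in logs])
ax2.set(xlabel="step", ylabel="reward accuracy", title="rewards/accuracies")
fig.tight_layout()
fig.savefig("training_curves.png", dpi=150)
```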