davidanugraha committed on commit 46362ff (verified) · 1 Parent(s): 1d1171a

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: meta-llama/Llama-3.2-3B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: helpsteer3_llama32_3b_dpo_skyworkqwen3
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # helpsteer3_llama32_3b_dpo_skyworkqwen3
+
+ This model is a fine-tuned version of [meta-llama/Llama-3.2-3B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct) on the dpo_helpsteer3_llama32_3b_skyworkqwen3 dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-06
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: ADAMW_TORCH with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.6.0
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
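
Since the card above is still a stub, a minimal usage sketch may help. It is only a sketch: the `model_id` below is a placeholder for wherever this checkpoint is hosted or checked out locally, and the sampling values mirror the `generation_config.json` added later in this commit.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder id; point this at the actual repository or a local copy of this commit.
model_id = "path/to/helpsteer3_llama32_3b_dpo_skyworkqwen3"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize what DPO fine-tuning does in one sentence."},
]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# Sampling settings follow generation_config.json from this commit
# (do_sample=True, temperature=0.6, top_p=0.9).
outputs = model.generate(inputs, max_new_tokens=256, do_sample=True, temperature=0.6, top_p=0.9)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```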
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 1.0,
+     "total_flos": 161167907028992.0,
+     "train_loss": 0.47723283336431094,
+     "train_runtime": 14257.9418,
+     "train_samples_per_second": 6.575,
+     "train_steps_per_second": 0.103
+ }
chat_template.jinja ADDED
@@ -0,0 +1,93 @@
+ {{- bos_token }}
+ {%- if custom_tools is defined %}
+ {%- set tools = custom_tools %}
+ {%- endif %}
+ {%- if not tools_in_user_message is defined %}
+ {%- set tools_in_user_message = true %}
+ {%- endif %}
+ {%- if not date_string is defined %}
+ {%- if strftime_now is defined %}
+ {%- set date_string = strftime_now("%d %b %Y") %}
+ {%- else %}
+ {%- set date_string = "26 Jul 2024" %}
+ {%- endif %}
+ {%- endif %}
+ {%- if not tools is defined %}
+ {%- set tools = none %}
+ {%- endif %}
+
+ {#- This block extracts the system message, so we can slot it into the right place. #}
+ {%- if messages[0]['role'] == 'system' %}
+ {%- set system_message = messages[0]['content']|trim %}
+ {%- set messages = messages[1:] %}
+ {%- else %}
+ {%- set system_message = "" %}
+ {%- endif %}
+
+ {#- System message #}
+ {{- "<|start_header_id|>system<|end_header_id|>\n\n" }}
+ {%- if tools is not none %}
+ {{- "Environment: ipython\n" }}
+ {%- endif %}
+ {{- "Cutting Knowledge Date: December 2023\n" }}
+ {{- "Today Date: " + date_string + "\n\n" }}
+ {%- if tools is not none and not tools_in_user_message %}
+ {{- "You have access to the following functions. To call a function, please respond with JSON for a function call." }}
+ {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+ {{- "Do not use variables.\n\n" }}
+ {%- for t in tools %}
+ {{- t | tojson(indent=4) }}
+ {{- "\n\n" }}
+ {%- endfor %}
+ {%- endif %}
+ {{- system_message }}
+ {{- "<|eot_id|>" }}
+
+ {#- Custom tools are passed in a user message with some extra guidance #}
+ {%- if tools_in_user_message and not tools is none %}
+ {#- Extract the first user message so we can plug it in here #}
+ {%- if messages | length != 0 %}
+ {%- set first_user_message = messages[0]['content']|trim %}
+ {%- set messages = messages[1:] %}
+ {%- else %}
+ {{- raise_exception("Cannot put tools in the first user message when there's no first user message!") }}
+ {%- endif %}
+ {{- '<|start_header_id|>user<|end_header_id|>\n\n' -}}
+ {{- "Given the following functions, please respond with a JSON for a function call " }}
+ {{- "with its proper arguments that best answers the given prompt.\n\n" }}
+ {{- 'Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}.' }}
+ {{- "Do not use variables.\n\n" }}
+ {%- for t in tools %}
+ {{- t | tojson(indent=4) }}
+ {{- "\n\n" }}
+ {%- endfor %}
+ {{- first_user_message + "<|eot_id|>"}}
+ {%- endif %}
+
+ {%- for message in messages %}
+ {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}
+ {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' }}
+ {%- elif 'tool_calls' in message %}
+ {%- if not message.tool_calls|length == 1 %}
+ {{- raise_exception("This model only supports single tool-calls at once!") }}
+ {%- endif %}
+ {%- set tool_call = message.tool_calls[0].function %}
+ {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' -}}
+ {{- '{"name": "' + tool_call.name + '", ' }}
+ {{- '"parameters": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- "}" }}
+ {{- "<|eot_id|>" }}
+ {%- elif message.role == "tool" or message.role == "ipython" %}
+ {{- "<|start_header_id|>ipython<|end_header_id|>\n\n" }}
+ {%- if message.content is mapping or message.content is iterable %}
+ {{- message.content | tojson }}
+ {%- else %}
+ {{- message.content }}
+ {%- endif %}
+ {{- "<|eot_id|>" }}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|start_header_id|>assistant<|end_header_id|>\n\n' }}
+ {%- endif %}
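
The template above is the standard Llama 3 instruct format, including the optional tool-calling branches. A short sketch of how it might be rendered through `apply_chat_template` follows; the path is a placeholder, and it assumes a `transformers` release recent enough (such as the 4.52.4 recorded in the card) to pick up a standalone `chat_template.jinja`.

```python
from transformers import AutoTokenizer

# Placeholder path; use the repository id or a local checkout containing this commit's tokenizer files.
tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]

# add_generation_prompt=True appends the final "<|start_header_id|>assistant<|end_header_id|>\n\n"
# block defined at the end of the template, so generation continues as the assistant.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Roughly (abridged):
# <|begin_of_text|><|start_header_id|>system<|end_header_id|>
#
# Cutting Knowledge Date: December 2023
# Today Date: ...
#
# You are a concise assistant.<|eot_id|><|start_header_id|>user<|end_header_id|>
# ...
```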
config.json ADDED
@@ -0,0 +1,39 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 24,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 32.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.52.4",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.52.4"
+ }
pytorch_model-00001-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef23408face34c65b64d37a89b27afa556b67174245ee906545f6d0fadba98ae
+ size 4965841415
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:710f5add74bc3f4142ddbea3577175afdc96a91e54949f7d7ece6b1244697d31
+ size 1459745184
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+   "metadata": {
+     "total_size": 6425499648
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00001-of-00002.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00002-of-00002.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
+     "model.norm.weight": "pytorch_model-00002-of-00002.bin"
+   }
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "additional_special_tokens": [
+     {
+       "content": "<|eom_id|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false
+     }
+   ],
+   "bos_token": {
+     "content": "<|begin_of_text|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|eot_id|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|eot_id|>"
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2068 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "additional_special_tokens": [
2053
+ "<|eom_id|>"
2054
+ ],
2055
+ "bos_token": "<|begin_of_text|>",
2056
+ "clean_up_tokenization_spaces": true,
2057
+ "eos_token": "<|eot_id|>",
2058
+ "extra_special_tokens": {},
2059
+ "model_input_names": [
2060
+ "input_ids",
2061
+ "attention_mask"
2062
+ ],
2063
+ "model_max_length": 131072,
2064
+ "pad_token": "<|eot_id|>",
2065
+ "padding_side": "right",
2066
+ "split_special_tokens": false,
2067
+ "tokenizer_class": "PreTrainedTokenizer"
2068
+ }
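
The tokenizer settings closing the file above pin the pad token to the EOS token (`<|eot_id|>`) with right-side padding, and declare ids 128089–128255 as unused `<|reserved_special_token_*|>` placeholders. A minimal sketch of inspecting those settings after download, assuming a local clone of this repository at a placeholder path:

```python
from transformers import AutoTokenizer

# Placeholder path: point this at a local clone of this repository.
tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")

# The config sets pad_token = eos_token = "<|eot_id|>" with right padding,
# so padded positions share the EOS id and must be masked via attention_mask.
print(tokenizer.pad_token, tokenizer.eos_token)  # both "<|eot_id|>"
print(tokenizer.model_max_length)                # 131072
print(tokenizer.padding_side)                    # "right"

# Ids up through 128255 are reserved placeholder specials, per the config above.
print(tokenizer.convert_ids_to_tokens(128255))   # "<|reserved_special_token_247|>"
```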
train_results.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 161167907028992.0,
4
+ "train_loss": 0.47723283336431094,
5
+ "train_runtime": 14257.9418,
6
+ "train_samples_per_second": 6.575,
7
+ "train_steps_per_second": 0.103
8
+ }
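
The throughput figures above are internally consistent with the step counts in trainer_log.jsonl below; a quick sanity check of the arithmetic, using only the numbers reported in these two files:

```python
# Figures from train_results.json above.
train_runtime = 14257.9418    # seconds
steps_per_second = 0.103
samples_per_second = 6.575

# 0.103 steps/s over ~14258 s gives ~1469 steps, matching total_steps = 1465
# in trainer_log.jsonl (the small gap is rounding in steps_per_second).
print(round(train_runtime * steps_per_second))       # ~1469

# samples/s divided by steps/s recovers the effective samples per optimizer step.
print(round(samples_per_second / steps_per_second))  # ~64
```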
trainer_log.jsonl ADDED
@@ -0,0 +1,147 @@
1
+ {"current_steps": 10, "total_steps": 1465, "loss": 0.6921, "accuracy": 0.41874998807907104, "lr": 6.122448979591837e-08, "epoch": 0.00682681230532918, "percentage": 0.68, "elapsed_time": "0:01:40", "remaining_time": "4:02:36"}
2
+ {"current_steps": 20, "total_steps": 1465, "loss": 0.6937, "accuracy": 0.4859375059604645, "lr": 1.2925170068027211e-07, "epoch": 0.01365362461065836, "percentage": 1.37, "elapsed_time": "0:03:18", "remaining_time": "3:59:12"}
3
+ {"current_steps": 30, "total_steps": 1465, "loss": 0.6936, "accuracy": 0.46875, "lr": 1.9727891156462583e-07, "epoch": 0.02048043691598754, "percentage": 2.05, "elapsed_time": "0:04:57", "remaining_time": "3:57:29"}
4
+ {"current_steps": 40, "total_steps": 1465, "loss": 0.6913, "accuracy": 0.53125, "lr": 2.653061224489796e-07, "epoch": 0.02730724922131672, "percentage": 2.73, "elapsed_time": "0:06:36", "remaining_time": "3:55:42"}
5
+ {"current_steps": 50, "total_steps": 1465, "loss": 0.6942, "accuracy": 0.46406251192092896, "lr": 3.333333333333333e-07, "epoch": 0.0341340615266459, "percentage": 3.41, "elapsed_time": "0:08:14", "remaining_time": "3:53:14"}
6
+ {"current_steps": 60, "total_steps": 1465, "loss": 0.6937, "accuracy": 0.5015624761581421, "lr": 4.0136054421768705e-07, "epoch": 0.04096087383197508, "percentage": 4.1, "elapsed_time": "0:09:51", "remaining_time": "3:50:59"}
7
+ {"current_steps": 70, "total_steps": 1465, "loss": 0.6933, "accuracy": 0.4937499761581421, "lr": 4.693877551020408e-07, "epoch": 0.04778768613730426, "percentage": 4.78, "elapsed_time": "0:11:29", "remaining_time": "3:48:58"}
8
+ {"current_steps": 80, "total_steps": 1465, "loss": 0.6878, "accuracy": 0.6031249761581421, "lr": 5.374149659863945e-07, "epoch": 0.05461449844263344, "percentage": 5.46, "elapsed_time": "0:13:07", "remaining_time": "3:47:07"}
9
+ {"current_steps": 90, "total_steps": 1465, "loss": 0.6876, "accuracy": 0.582812488079071, "lr": 6.054421768707482e-07, "epoch": 0.06144131074796262, "percentage": 6.14, "elapsed_time": "0:14:45", "remaining_time": "3:45:32"}
10
+ {"current_steps": 100, "total_steps": 1465, "loss": 0.6858, "accuracy": 0.5718749761581421, "lr": 6.734693877551019e-07, "epoch": 0.0682681230532918, "percentage": 6.83, "elapsed_time": "0:16:25", "remaining_time": "3:44:09"}
11
+ {"current_steps": 110, "total_steps": 1465, "loss": 0.6817, "accuracy": 0.625, "lr": 7.414965986394558e-07, "epoch": 0.07509493535862098, "percentage": 7.51, "elapsed_time": "0:18:01", "remaining_time": "3:42:04"}
12
+ {"current_steps": 120, "total_steps": 1465, "loss": 0.6828, "accuracy": 0.5843750238418579, "lr": 8.095238095238095e-07, "epoch": 0.08192174766395016, "percentage": 8.19, "elapsed_time": "0:19:39", "remaining_time": "3:40:16"}
13
+ {"current_steps": 130, "total_steps": 1465, "loss": 0.6782, "accuracy": 0.651562511920929, "lr": 8.775510204081632e-07, "epoch": 0.08874855996927934, "percentage": 8.87, "elapsed_time": "0:21:17", "remaining_time": "3:38:43"}
14
+ {"current_steps": 140, "total_steps": 1465, "loss": 0.6721, "accuracy": 0.6499999761581421, "lr": 9.45578231292517e-07, "epoch": 0.09557537227460852, "percentage": 9.56, "elapsed_time": "0:22:57", "remaining_time": "3:37:14"}
15
+ {"current_steps": 150, "total_steps": 1465, "loss": 0.666, "accuracy": 0.6500000357627869, "lr": 9.98482549317147e-07, "epoch": 0.1024021845799377, "percentage": 10.24, "elapsed_time": "0:24:34", "remaining_time": "3:35:30"}
16
+ {"current_steps": 160, "total_steps": 1465, "loss": 0.6654, "accuracy": 0.6500000357627869, "lr": 9.908952959028832e-07, "epoch": 0.10922899688526688, "percentage": 10.92, "elapsed_time": "0:26:11", "remaining_time": "3:33:35"}
17
+ {"current_steps": 170, "total_steps": 1465, "loss": 0.659, "accuracy": 0.6343749761581421, "lr": 9.833080424886191e-07, "epoch": 0.11605580919059606, "percentage": 11.6, "elapsed_time": "0:27:48", "remaining_time": "3:31:46"}
18
+ {"current_steps": 180, "total_steps": 1465, "loss": 0.6528, "accuracy": 0.6749999523162842, "lr": 9.75720789074355e-07, "epoch": 0.12288262149592524, "percentage": 12.29, "elapsed_time": "0:29:26", "remaining_time": "3:30:11"}
19
+ {"current_steps": 190, "total_steps": 1465, "loss": 0.6441, "accuracy": 0.6812500357627869, "lr": 9.68133535660091e-07, "epoch": 0.12970943380125444, "percentage": 12.97, "elapsed_time": "0:31:03", "remaining_time": "3:28:23"}
20
+ {"current_steps": 200, "total_steps": 1465, "loss": 0.6249, "accuracy": 0.7046875357627869, "lr": 9.60546282245827e-07, "epoch": 0.1365362461065836, "percentage": 13.65, "elapsed_time": "0:32:41", "remaining_time": "3:26:43"}
21
+ {"current_steps": 210, "total_steps": 1465, "loss": 0.6287, "accuracy": 0.6687500476837158, "lr": 9.52959028831563e-07, "epoch": 0.1433630584119128, "percentage": 14.33, "elapsed_time": "0:34:21", "remaining_time": "3:25:16"}
22
+ {"current_steps": 220, "total_steps": 1465, "loss": 0.6015, "accuracy": 0.7109375, "lr": 9.453717754172988e-07, "epoch": 0.15018987071724196, "percentage": 15.02, "elapsed_time": "0:35:59", "remaining_time": "3:23:42"}
23
+ {"current_steps": 230, "total_steps": 1465, "loss": 0.611, "accuracy": 0.6734374761581421, "lr": 9.377845220030348e-07, "epoch": 0.15701668302257116, "percentage": 15.7, "elapsed_time": "0:37:40", "remaining_time": "3:22:19"}
24
+ {"current_steps": 240, "total_steps": 1465, "loss": 0.6125, "accuracy": 0.690625011920929, "lr": 9.301972685887707e-07, "epoch": 0.16384349532790032, "percentage": 16.38, "elapsed_time": "0:39:18", "remaining_time": "3:20:36"}
25
+ {"current_steps": 250, "total_steps": 1465, "loss": 0.6205, "accuracy": 0.6546875238418579, "lr": 9.226100151745068e-07, "epoch": 0.17067030763322952, "percentage": 17.06, "elapsed_time": "0:40:59", "remaining_time": "3:19:12"}
26
+ {"current_steps": 260, "total_steps": 1465, "loss": 0.6056, "accuracy": 0.7046875357627869, "lr": 9.150227617602428e-07, "epoch": 0.17749711993855868, "percentage": 17.75, "elapsed_time": "0:42:37", "remaining_time": "3:17:33"}
27
+ {"current_steps": 270, "total_steps": 1465, "loss": 0.595, "accuracy": 0.7078125476837158, "lr": 9.074355083459787e-07, "epoch": 0.18432393224388788, "percentage": 18.43, "elapsed_time": "0:44:16", "remaining_time": "3:15:55"}
28
+ {"current_steps": 280, "total_steps": 1465, "loss": 0.589, "accuracy": 0.7000000476837158, "lr": 8.998482549317147e-07, "epoch": 0.19115074454921704, "percentage": 19.11, "elapsed_time": "0:45:52", "remaining_time": "3:14:07"}
29
+ {"current_steps": 290, "total_steps": 1465, "loss": 0.59, "accuracy": 0.7046875357627869, "lr": 8.922610015174506e-07, "epoch": 0.19797755685454624, "percentage": 19.8, "elapsed_time": "0:47:31", "remaining_time": "3:12:32"}
30
+ {"current_steps": 300, "total_steps": 1465, "loss": 0.6111, "accuracy": 0.715624988079071, "lr": 8.846737481031866e-07, "epoch": 0.2048043691598754, "percentage": 20.48, "elapsed_time": "0:49:10", "remaining_time": "3:10:55"}
31
+ {"current_steps": 310, "total_steps": 1465, "loss": 0.5794, "accuracy": 0.7265625, "lr": 8.770864946889226e-07, "epoch": 0.2116311814652046, "percentage": 21.16, "elapsed_time": "0:50:49", "remaining_time": "3:09:23"}
32
+ {"current_steps": 320, "total_steps": 1465, "loss": 0.5727, "accuracy": 0.7281250357627869, "lr": 8.694992412746586e-07, "epoch": 0.21845799377053376, "percentage": 21.84, "elapsed_time": "0:52:26", "remaining_time": "3:07:37"}
33
+ {"current_steps": 330, "total_steps": 1465, "loss": 0.5913, "accuracy": 0.6703125238418579, "lr": 8.619119878603945e-07, "epoch": 0.22528480607586296, "percentage": 22.53, "elapsed_time": "0:54:03", "remaining_time": "3:05:55"}
34
+ {"current_steps": 340, "total_steps": 1465, "loss": 0.56, "accuracy": 0.7328125238418579, "lr": 8.543247344461305e-07, "epoch": 0.23211161838119213, "percentage": 23.21, "elapsed_time": "0:55:43", "remaining_time": "3:04:24"}
35
+ {"current_steps": 350, "total_steps": 1465, "loss": 0.5515, "accuracy": 0.706250011920929, "lr": 8.467374810318663e-07, "epoch": 0.23893843068652132, "percentage": 23.89, "elapsed_time": "0:57:20", "remaining_time": "3:02:40"}
36
+ {"current_steps": 360, "total_steps": 1465, "loss": 0.5809, "accuracy": 0.7046875357627869, "lr": 8.391502276176023e-07, "epoch": 0.24576524299185049, "percentage": 24.57, "elapsed_time": "0:58:57", "remaining_time": "3:00:58"}
37
+ {"current_steps": 370, "total_steps": 1465, "loss": 0.5675, "accuracy": 0.7234375476837158, "lr": 8.315629742033384e-07, "epoch": 0.25259205529717965, "percentage": 25.26, "elapsed_time": "1:00:35", "remaining_time": "2:59:19"}
38
+ {"current_steps": 380, "total_steps": 1465, "loss": 0.5414, "accuracy": 0.7265625, "lr": 8.239757207890743e-07, "epoch": 0.2594188676025089, "percentage": 25.94, "elapsed_time": "1:02:13", "remaining_time": "2:57:40"}
39
+ {"current_steps": 390, "total_steps": 1465, "loss": 0.532, "accuracy": 0.7343750596046448, "lr": 8.163884673748103e-07, "epoch": 0.26624567990783804, "percentage": 26.62, "elapsed_time": "1:03:49", "remaining_time": "2:55:56"}
40
+ {"current_steps": 400, "total_steps": 1465, "loss": 0.5272, "accuracy": 0.737500011920929, "lr": 8.088012139605462e-07, "epoch": 0.2730724922131672, "percentage": 27.3, "elapsed_time": "1:05:25", "remaining_time": "2:54:12"}
41
+ {"current_steps": 410, "total_steps": 1465, "loss": 0.5575, "accuracy": 0.721875011920929, "lr": 8.012139605462822e-07, "epoch": 0.2798993045184964, "percentage": 27.99, "elapsed_time": "1:07:03", "remaining_time": "2:52:33"}
42
+ {"current_steps": 420, "total_steps": 1465, "loss": 0.5265, "accuracy": 0.7484375238418579, "lr": 7.936267071320181e-07, "epoch": 0.2867261168238256, "percentage": 28.67, "elapsed_time": "1:08:39", "remaining_time": "2:50:50"}
43
+ {"current_steps": 430, "total_steps": 1465, "loss": 0.5223, "accuracy": 0.7421875596046448, "lr": 7.860394537177542e-07, "epoch": 0.29355292912915476, "percentage": 29.35, "elapsed_time": "1:10:18", "remaining_time": "2:49:13"}
44
+ {"current_steps": 440, "total_steps": 1465, "loss": 0.5055, "accuracy": 0.7765625715255737, "lr": 7.784522003034901e-07, "epoch": 0.3003797414344839, "percentage": 30.03, "elapsed_time": "1:11:55", "remaining_time": "2:47:32"}
45
+ {"current_steps": 450, "total_steps": 1465, "loss": 0.5226, "accuracy": 0.746874988079071, "lr": 7.708649468892261e-07, "epoch": 0.3072065537398131, "percentage": 30.72, "elapsed_time": "1:13:34", "remaining_time": "2:45:56"}
46
+ {"current_steps": 460, "total_steps": 1465, "loss": 0.5013, "accuracy": 0.765625, "lr": 7.632776934749621e-07, "epoch": 0.3140333660451423, "percentage": 31.4, "elapsed_time": "1:15:11", "remaining_time": "2:44:16"}
47
+ {"current_steps": 470, "total_steps": 1465, "loss": 0.4896, "accuracy": 0.7671874761581421, "lr": 7.55690440060698e-07, "epoch": 0.3208601783504715, "percentage": 32.08, "elapsed_time": "1:16:48", "remaining_time": "2:42:35"}
48
+ {"current_steps": 480, "total_steps": 1465, "loss": 0.5178, "accuracy": 0.7406250238418579, "lr": 7.481031866464339e-07, "epoch": 0.32768699065580065, "percentage": 32.76, "elapsed_time": "1:18:23", "remaining_time": "2:40:52"}
49
+ {"current_steps": 490, "total_steps": 1465, "loss": 0.5155, "accuracy": 0.7484375238418579, "lr": 7.405159332321699e-07, "epoch": 0.3345138029611298, "percentage": 33.45, "elapsed_time": "1:20:04", "remaining_time": "2:39:19"}
50
+ {"current_steps": 500, "total_steps": 1465, "loss": 0.5274, "accuracy": 0.7328125238418579, "lr": 7.329286798179059e-07, "epoch": 0.34134061526645904, "percentage": 34.13, "elapsed_time": "1:21:42", "remaining_time": "2:37:41"}
51
+ {"current_steps": 510, "total_steps": 1465, "loss": 0.4918, "accuracy": 0.7640624642372131, "lr": 7.253414264036418e-07, "epoch": 0.3481674275717882, "percentage": 34.81, "elapsed_time": "1:23:43", "remaining_time": "2:36:47"}
52
+ {"current_steps": 520, "total_steps": 1465, "loss": 0.5137, "accuracy": 0.75, "lr": 7.177541729893778e-07, "epoch": 0.35499423987711737, "percentage": 35.49, "elapsed_time": "1:25:21", "remaining_time": "2:35:06"}
53
+ {"current_steps": 530, "total_steps": 1465, "loss": 0.5059, "accuracy": 0.7500000596046448, "lr": 7.101669195751137e-07, "epoch": 0.36182105218244653, "percentage": 36.18, "elapsed_time": "1:26:58", "remaining_time": "2:33:26"}
54
+ {"current_steps": 540, "total_steps": 1465, "loss": 0.5282, "accuracy": 0.745312511920929, "lr": 7.025796661608497e-07, "epoch": 0.36864786448777576, "percentage": 36.86, "elapsed_time": "1:28:34", "remaining_time": "2:31:43"}
55
+ {"current_steps": 550, "total_steps": 1465, "loss": 0.5021, "accuracy": 0.7703125476837158, "lr": 6.949924127465857e-07, "epoch": 0.3754746767931049, "percentage": 37.54, "elapsed_time": "1:30:11", "remaining_time": "2:30:02"}
56
+ {"current_steps": 560, "total_steps": 1465, "loss": 0.4667, "accuracy": 0.770312488079071, "lr": 6.874051593323217e-07, "epoch": 0.3823014890984341, "percentage": 38.23, "elapsed_time": "1:31:47", "remaining_time": "2:28:20"}
57
+ {"current_steps": 570, "total_steps": 1465, "loss": 0.5004, "accuracy": 0.7593750357627869, "lr": 6.798179059180577e-07, "epoch": 0.38912830140376325, "percentage": 38.91, "elapsed_time": "1:33:22", "remaining_time": "2:26:37"}
58
+ {"current_steps": 580, "total_steps": 1465, "loss": 0.497, "accuracy": 0.768750011920929, "lr": 6.722306525037936e-07, "epoch": 0.3959551137090925, "percentage": 39.59, "elapsed_time": "1:35:00", "remaining_time": "2:24:57"}
59
+ {"current_steps": 590, "total_steps": 1465, "loss": 0.472, "accuracy": 0.7718750238418579, "lr": 6.646433990895296e-07, "epoch": 0.40278192601442164, "percentage": 40.27, "elapsed_time": "1:36:34", "remaining_time": "2:23:13"}
60
+ {"current_steps": 600, "total_steps": 1465, "loss": 0.4551, "accuracy": 0.776562511920929, "lr": 6.570561456752655e-07, "epoch": 0.4096087383197508, "percentage": 40.96, "elapsed_time": "1:38:11", "remaining_time": "2:21:33"}
61
+ {"current_steps": 610, "total_steps": 1465, "loss": 0.4563, "accuracy": 0.770312488079071, "lr": 6.494688922610015e-07, "epoch": 0.41643555062508, "percentage": 41.64, "elapsed_time": "1:39:47", "remaining_time": "2:19:52"}
62
+ {"current_steps": 620, "total_steps": 1465, "loss": 0.4778, "accuracy": 0.7906250357627869, "lr": 6.418816388467374e-07, "epoch": 0.4232623629304092, "percentage": 42.32, "elapsed_time": "1:41:24", "remaining_time": "2:18:12"}
63
+ {"current_steps": 630, "total_steps": 1465, "loss": 0.4689, "accuracy": 0.78125, "lr": 6.342943854324734e-07, "epoch": 0.43008917523573836, "percentage": 43.0, "elapsed_time": "1:43:02", "remaining_time": "2:16:33"}
64
+ {"current_steps": 640, "total_steps": 1465, "loss": 0.4635, "accuracy": 0.7875000238418579, "lr": 6.267071320182093e-07, "epoch": 0.43691598754106753, "percentage": 43.69, "elapsed_time": "1:44:41", "remaining_time": "2:14:56"}
65
+ {"current_steps": 650, "total_steps": 1465, "loss": 0.4646, "accuracy": 0.770312488079071, "lr": 6.191198786039453e-07, "epoch": 0.4437427998463967, "percentage": 44.37, "elapsed_time": "1:46:17", "remaining_time": "2:13:16"}
66
+ {"current_steps": 660, "total_steps": 1465, "loss": 0.5188, "accuracy": 0.7671874761581421, "lr": 6.115326251896813e-07, "epoch": 0.4505696121517259, "percentage": 45.05, "elapsed_time": "1:47:53", "remaining_time": "2:11:35"}
67
+ {"current_steps": 670, "total_steps": 1465, "loss": 0.4953, "accuracy": 0.7437499761581421, "lr": 6.039453717754173e-07, "epoch": 0.4573964244570551, "percentage": 45.73, "elapsed_time": "1:49:28", "remaining_time": "2:09:54"}
68
+ {"current_steps": 680, "total_steps": 1465, "loss": 0.4608, "accuracy": 0.7781250476837158, "lr": 5.963581183611533e-07, "epoch": 0.46422323676238425, "percentage": 46.42, "elapsed_time": "1:51:05", "remaining_time": "2:08:14"}
69
+ {"current_steps": 690, "total_steps": 1465, "loss": 0.4352, "accuracy": 0.792187511920929, "lr": 5.887708649468892e-07, "epoch": 0.47105004906771347, "percentage": 47.1, "elapsed_time": "1:52:41", "remaining_time": "2:06:34"}
70
+ {"current_steps": 700, "total_steps": 1465, "loss": 0.4534, "accuracy": 0.7812500596046448, "lr": 5.811836115326252e-07, "epoch": 0.47787686137304264, "percentage": 47.78, "elapsed_time": "1:54:21", "remaining_time": "2:04:58"}
71
+ {"current_steps": 710, "total_steps": 1465, "loss": 0.4487, "accuracy": 0.7828124761581421, "lr": 5.735963581183611e-07, "epoch": 0.4847036736783718, "percentage": 48.46, "elapsed_time": "1:55:55", "remaining_time": "2:03:16"}
72
+ {"current_steps": 720, "total_steps": 1465, "loss": 0.4743, "accuracy": 0.770312488079071, "lr": 5.660091047040971e-07, "epoch": 0.49153048598370097, "percentage": 49.15, "elapsed_time": "1:57:33", "remaining_time": "2:01:37"}
73
+ {"current_steps": 730, "total_steps": 1465, "loss": 0.4451, "accuracy": 0.8125, "lr": 5.584218512898331e-07, "epoch": 0.4983572982890302, "percentage": 49.83, "elapsed_time": "1:59:09", "remaining_time": "1:59:58"}
74
+ {"current_steps": 740, "total_steps": 1465, "loss": 0.4356, "accuracy": 0.796875, "lr": 5.508345978755691e-07, "epoch": 0.5051841105943593, "percentage": 50.51, "elapsed_time": "2:00:45", "remaining_time": "1:58:18"}
75
+ {"current_steps": 750, "total_steps": 1465, "loss": 0.4433, "accuracy": 0.8046875596046448, "lr": 5.432473444613049e-07, "epoch": 0.5120109228996885, "percentage": 51.19, "elapsed_time": "2:02:23", "remaining_time": "1:56:40"}
76
+ {"current_steps": 760, "total_steps": 1465, "loss": 0.4236, "accuracy": 0.800000011920929, "lr": 5.356600910470409e-07, "epoch": 0.5188377352050177, "percentage": 51.88, "elapsed_time": "2:04:01", "remaining_time": "1:55:03"}
77
+ {"current_steps": 770, "total_steps": 1465, "loss": 0.4347, "accuracy": 0.8109375238418579, "lr": 5.280728376327769e-07, "epoch": 0.5256645475103469, "percentage": 52.56, "elapsed_time": "2:05:38", "remaining_time": "1:53:23"}
78
+ {"current_steps": 780, "total_steps": 1465, "loss": 0.4609, "accuracy": 0.7703125476837158, "lr": 5.204855842185128e-07, "epoch": 0.5324913598156761, "percentage": 53.24, "elapsed_time": "2:07:14", "remaining_time": "1:51:44"}
79
+ {"current_steps": 790, "total_steps": 1465, "loss": 0.4193, "accuracy": 0.8093750476837158, "lr": 5.128983308042489e-07, "epoch": 0.5393181721210053, "percentage": 53.92, "elapsed_time": "2:08:48", "remaining_time": "1:50:03"}
80
+ {"current_steps": 800, "total_steps": 1465, "loss": 0.424, "accuracy": 0.8046875, "lr": 5.053110773899848e-07, "epoch": 0.5461449844263344, "percentage": 54.61, "elapsed_time": "2:10:25", "remaining_time": "1:48:25"}
81
+ {"current_steps": 810, "total_steps": 1465, "loss": 0.4266, "accuracy": 0.7984375357627869, "lr": 4.977238239757208e-07, "epoch": 0.5529717967316636, "percentage": 55.29, "elapsed_time": "2:12:02", "remaining_time": "1:46:46"}
82
+ {"current_steps": 820, "total_steps": 1465, "loss": 0.4526, "accuracy": 0.796875, "lr": 4.901365705614567e-07, "epoch": 0.5597986090369927, "percentage": 55.97, "elapsed_time": "2:13:38", "remaining_time": "1:45:07"}
83
+ {"current_steps": 830, "total_steps": 1465, "loss": 0.4189, "accuracy": 0.815625011920929, "lr": 4.825493171471927e-07, "epoch": 0.566625421342322, "percentage": 56.66, "elapsed_time": "2:15:15", "remaining_time": "1:43:28"}
84
+ {"current_steps": 840, "total_steps": 1465, "loss": 0.3916, "accuracy": 0.8375000357627869, "lr": 4.7496206373292864e-07, "epoch": 0.5734522336476512, "percentage": 57.34, "elapsed_time": "2:16:51", "remaining_time": "1:41:49"}
85
+ {"current_steps": 850, "total_steps": 1465, "loss": 0.394, "accuracy": 0.8218750357627869, "lr": 4.673748103186646e-07, "epoch": 0.5802790459529803, "percentage": 58.02, "elapsed_time": "2:18:25", "remaining_time": "1:40:09"}
86
+ {"current_steps": 860, "total_steps": 1465, "loss": 0.4269, "accuracy": 0.801562488079071, "lr": 4.597875569044006e-07, "epoch": 0.5871058582583095, "percentage": 58.7, "elapsed_time": "2:20:01", "remaining_time": "1:38:30"}
87
+ {"current_steps": 870, "total_steps": 1465, "loss": 0.4296, "accuracy": 0.7921874523162842, "lr": 4.5220030349013654e-07, "epoch": 0.5939326705636387, "percentage": 59.39, "elapsed_time": "2:21:37", "remaining_time": "1:36:51"}
88
+ {"current_steps": 880, "total_steps": 1465, "loss": 0.4233, "accuracy": 0.817187488079071, "lr": 4.446130500758725e-07, "epoch": 0.6007594828689679, "percentage": 60.07, "elapsed_time": "2:23:13", "remaining_time": "1:35:12"}
89
+ {"current_steps": 890, "total_steps": 1465, "loss": 0.4612, "accuracy": 0.7812500596046448, "lr": 4.370257966616085e-07, "epoch": 0.6075862951742971, "percentage": 60.75, "elapsed_time": "2:24:50", "remaining_time": "1:33:34"}
90
+ {"current_steps": 900, "total_steps": 1465, "loss": 0.4007, "accuracy": 0.828125, "lr": 4.2943854324734444e-07, "epoch": 0.6144131074796262, "percentage": 61.43, "elapsed_time": "2:26:27", "remaining_time": "1:31:56"}
91
+ {"current_steps": 910, "total_steps": 1465, "loss": 0.4185, "accuracy": 0.8265625238418579, "lr": 4.2185128983308036e-07, "epoch": 0.6212399197849554, "percentage": 62.12, "elapsed_time": "2:28:02", "remaining_time": "1:30:17"}
92
+ {"current_steps": 920, "total_steps": 1465, "loss": 0.4194, "accuracy": 0.817187488079071, "lr": 4.142640364188164e-07, "epoch": 0.6280667320902846, "percentage": 62.8, "elapsed_time": "2:29:38", "remaining_time": "1:28:38"}
93
+ {"current_steps": 930, "total_steps": 1465, "loss": 0.3631, "accuracy": 0.8765624761581421, "lr": 4.0667678300455234e-07, "epoch": 0.6348935443956137, "percentage": 63.48, "elapsed_time": "2:31:13", "remaining_time": "1:26:59"}
94
+ {"current_steps": 940, "total_steps": 1465, "loss": 0.397, "accuracy": 0.8250000476837158, "lr": 3.990895295902883e-07, "epoch": 0.641720356700943, "percentage": 64.16, "elapsed_time": "2:32:48", "remaining_time": "1:25:20"}
95
+ {"current_steps": 950, "total_steps": 1465, "loss": 0.4001, "accuracy": 0.8187500238418579, "lr": 3.915022761760243e-07, "epoch": 0.6485471690062722, "percentage": 64.85, "elapsed_time": "2:34:22", "remaining_time": "1:23:41"}
96
+ {"current_steps": 960, "total_steps": 1465, "loss": 0.3891, "accuracy": 0.817187488079071, "lr": 3.8391502276176024e-07, "epoch": 0.6553739813116013, "percentage": 65.53, "elapsed_time": "2:35:58", "remaining_time": "1:22:02"}
97
+ {"current_steps": 970, "total_steps": 1465, "loss": 0.4196, "accuracy": 0.8125, "lr": 3.763277693474962e-07, "epoch": 0.6622007936169305, "percentage": 66.21, "elapsed_time": "2:37:33", "remaining_time": "1:20:24"}
98
+ {"current_steps": 980, "total_steps": 1465, "loss": 0.4144, "accuracy": 0.815625011920929, "lr": 3.687405159332321e-07, "epoch": 0.6690276059222596, "percentage": 66.89, "elapsed_time": "2:39:09", "remaining_time": "1:18:45"}
99
+ {"current_steps": 990, "total_steps": 1465, "loss": 0.3983, "accuracy": 0.8218750357627869, "lr": 3.611532625189681e-07, "epoch": 0.6758544182275888, "percentage": 67.58, "elapsed_time": "2:40:44", "remaining_time": "1:17:07"}
100
+ {"current_steps": 1000, "total_steps": 1465, "loss": 0.3827, "accuracy": 0.8437500596046448, "lr": 3.5356600910470406e-07, "epoch": 0.6826812305329181, "percentage": 68.26, "elapsed_time": "2:42:22", "remaining_time": "1:15:30"}
101
+ {"current_steps": 1010, "total_steps": 1465, "loss": 0.4033, "accuracy": 0.8296875357627869, "lr": 3.459787556904401e-07, "epoch": 0.6895080428382472, "percentage": 68.94, "elapsed_time": "2:44:23", "remaining_time": "1:14:03"}
102
+ {"current_steps": 1020, "total_steps": 1465, "loss": 0.3898, "accuracy": 0.8406250476837158, "lr": 3.3839150227617604e-07, "epoch": 0.6963348551435764, "percentage": 69.62, "elapsed_time": "2:45:59", "remaining_time": "1:12:25"}
103
+ {"current_steps": 1030, "total_steps": 1465, "loss": 0.396, "accuracy": 0.8171875476837158, "lr": 3.30804248861912e-07, "epoch": 0.7031616674489056, "percentage": 70.31, "elapsed_time": "2:47:36", "remaining_time": "1:10:47"}
104
+ {"current_steps": 1040, "total_steps": 1465, "loss": 0.3637, "accuracy": 0.8421875238418579, "lr": 3.232169954476479e-07, "epoch": 0.7099884797542347, "percentage": 70.99, "elapsed_time": "2:49:13", "remaining_time": "1:09:09"}
105
+ {"current_steps": 1050, "total_steps": 1465, "loss": 0.3636, "accuracy": 0.8374999761581421, "lr": 3.156297420333839e-07, "epoch": 0.716815292059564, "percentage": 71.67, "elapsed_time": "2:50:49", "remaining_time": "1:07:31"}
106
+ {"current_steps": 1060, "total_steps": 1465, "loss": 0.393, "accuracy": 0.831250011920929, "lr": 3.0804248861911986e-07, "epoch": 0.7236421043648931, "percentage": 72.35, "elapsed_time": "2:52:25", "remaining_time": "1:05:52"}
107
+ {"current_steps": 1070, "total_steps": 1465, "loss": 0.3562, "accuracy": 0.8500000238418579, "lr": 3.004552352048558e-07, "epoch": 0.7304689166702223, "percentage": 73.04, "elapsed_time": "2:53:58", "remaining_time": "1:04:13"}
108
+ {"current_steps": 1080, "total_steps": 1465, "loss": 0.3889, "accuracy": 0.832812488079071, "lr": 2.928679817905918e-07, "epoch": 0.7372957289755515, "percentage": 73.72, "elapsed_time": "2:55:34", "remaining_time": "1:02:35"}
+ {"current_steps": 1090, "total_steps": 1465, "loss": 0.3594, "accuracy": 0.854687511920929, "lr": 2.8528072837632776e-07, "epoch": 0.7441225412808806, "percentage": 74.4, "elapsed_time": "2:57:09", "remaining_time": "1:00:57"}
+ {"current_steps": 1100, "total_steps": 1465, "loss": 0.4057, "accuracy": 0.8203125, "lr": 2.776934749620637e-07, "epoch": 0.7509493535862098, "percentage": 75.09, "elapsed_time": "2:58:49", "remaining_time": "0:59:20"}
+ {"current_steps": 1110, "total_steps": 1465, "loss": 0.4044, "accuracy": 0.8140624761581421, "lr": 2.7010622154779964e-07, "epoch": 0.7577761658915391, "percentage": 75.77, "elapsed_time": "3:00:26", "remaining_time": "0:57:42"}
+ {"current_steps": 1120, "total_steps": 1465, "loss": 0.3858, "accuracy": 0.8187500238418579, "lr": 2.6251896813353566e-07, "epoch": 0.7646029781968682, "percentage": 76.45, "elapsed_time": "3:02:02", "remaining_time": "0:56:04"}
+ {"current_steps": 1130, "total_steps": 1465, "loss": 0.3663, "accuracy": 0.8453125357627869, "lr": 2.549317147192716e-07, "epoch": 0.7714297905021974, "percentage": 77.13, "elapsed_time": "3:03:38", "remaining_time": "0:54:26"}
+ {"current_steps": 1140, "total_steps": 1465, "loss": 0.3806, "accuracy": 0.8328125476837158, "lr": 2.473444613050076e-07, "epoch": 0.7782566028075265, "percentage": 77.82, "elapsed_time": "3:05:15", "remaining_time": "0:52:48"}
+ {"current_steps": 1150, "total_steps": 1465, "loss": 0.3998, "accuracy": 0.8328125476837158, "lr": 2.3975720789074356e-07, "epoch": 0.7850834151128557, "percentage": 78.5, "elapsed_time": "3:06:51", "remaining_time": "0:51:10"}
+ {"current_steps": 1160, "total_steps": 1465, "loss": 0.3717, "accuracy": 0.8250000476837158, "lr": 2.321699544764795e-07, "epoch": 0.791910227418185, "percentage": 79.18, "elapsed_time": "3:08:26", "remaining_time": "0:49:32"}
+ {"current_steps": 1170, "total_steps": 1465, "loss": 0.3732, "accuracy": 0.8296875357627869, "lr": 2.2458270106221546e-07, "epoch": 0.7987370397235141, "percentage": 79.86, "elapsed_time": "3:10:02", "remaining_time": "0:47:54"}
+ {"current_steps": 1180, "total_steps": 1465, "loss": 0.4147, "accuracy": 0.828125, "lr": 2.1699544764795143e-07, "epoch": 0.8055638520288433, "percentage": 80.55, "elapsed_time": "3:11:35", "remaining_time": "0:46:16"}
+ {"current_steps": 1190, "total_steps": 1465, "loss": 0.3354, "accuracy": 0.8609375357627869, "lr": 2.094081942336874e-07, "epoch": 0.8123906643341725, "percentage": 81.23, "elapsed_time": "3:13:11", "remaining_time": "0:44:38"}
+ {"current_steps": 1200, "total_steps": 1465, "loss": 0.3821, "accuracy": 0.839062511920929, "lr": 2.0182094081942336e-07, "epoch": 0.8192174766395016, "percentage": 81.91, "elapsed_time": "3:14:49", "remaining_time": "0:43:01"}
+ {"current_steps": 1210, "total_steps": 1465, "loss": 0.3658, "accuracy": 0.8531250357627869, "lr": 1.9423368740515933e-07, "epoch": 0.8260442889448308, "percentage": 82.59, "elapsed_time": "3:16:25", "remaining_time": "0:41:23"}
+ {"current_steps": 1220, "total_steps": 1465, "loss": 0.392, "accuracy": 0.8234375715255737, "lr": 1.8664643399089527e-07, "epoch": 0.83287110125016, "percentage": 83.28, "elapsed_time": "3:18:00", "remaining_time": "0:39:45"}
+ {"current_steps": 1230, "total_steps": 1465, "loss": 0.3747, "accuracy": 0.8484375476837158, "lr": 1.7905918057663124e-07, "epoch": 0.8396979135554892, "percentage": 83.96, "elapsed_time": "3:19:36", "remaining_time": "0:38:08"}
+ {"current_steps": 1240, "total_steps": 1465, "loss": 0.374, "accuracy": 0.8343750238418579, "lr": 1.7147192716236723e-07, "epoch": 0.8465247258608184, "percentage": 84.64, "elapsed_time": "3:21:11", "remaining_time": "0:36:30"}
+ {"current_steps": 1250, "total_steps": 1465, "loss": 0.3643, "accuracy": 0.8421875238418579, "lr": 1.638846737481032e-07, "epoch": 0.8533515381661475, "percentage": 85.32, "elapsed_time": "3:22:48", "remaining_time": "0:34:52"}
+ {"current_steps": 1260, "total_steps": 1465, "loss": 0.3464, "accuracy": 0.8515625, "lr": 1.5629742033383914e-07, "epoch": 0.8601783504714767, "percentage": 86.01, "elapsed_time": "3:24:25", "remaining_time": "0:33:15"}
+ {"current_steps": 1270, "total_steps": 1465, "loss": 0.3718, "accuracy": 0.8359375, "lr": 1.487101669195751e-07, "epoch": 0.867005162776806, "percentage": 86.69, "elapsed_time": "3:26:02", "remaining_time": "0:31:38"}
+ {"current_steps": 1280, "total_steps": 1465, "loss": 0.3686, "accuracy": 0.8359375, "lr": 1.4112291350531107e-07, "epoch": 0.8738319750821351, "percentage": 87.37, "elapsed_time": "3:27:38", "remaining_time": "0:30:00"}
+ {"current_steps": 1290, "total_steps": 1465, "loss": 0.3534, "accuracy": 0.864062488079071, "lr": 1.3353566009104704e-07, "epoch": 0.8806587873874643, "percentage": 88.05, "elapsed_time": "3:29:15", "remaining_time": "0:28:23"}
+ {"current_steps": 1300, "total_steps": 1465, "loss": 0.3575, "accuracy": 0.8406250476837158, "lr": 1.25948406676783e-07, "epoch": 0.8874855996927934, "percentage": 88.74, "elapsed_time": "3:30:51", "remaining_time": "0:26:45"}
+ {"current_steps": 1310, "total_steps": 1465, "loss": 0.3353, "accuracy": 0.8593750596046448, "lr": 1.1836115326251896e-07, "epoch": 0.8943124119981226, "percentage": 89.42, "elapsed_time": "3:32:26", "remaining_time": "0:25:08"}
+ {"current_steps": 1320, "total_steps": 1465, "loss": 0.3514, "accuracy": 0.859375, "lr": 1.1077389984825493e-07, "epoch": 0.9011392243034518, "percentage": 90.1, "elapsed_time": "3:34:05", "remaining_time": "0:23:31"}
+ {"current_steps": 1330, "total_steps": 1465, "loss": 0.3729, "accuracy": 0.856249988079071, "lr": 1.0318664643399089e-07, "epoch": 0.907966036608781, "percentage": 90.78, "elapsed_time": "3:35:42", "remaining_time": "0:21:53"}
+ {"current_steps": 1340, "total_steps": 1465, "loss": 0.3775, "accuracy": 0.8421875238418579, "lr": 9.559939301972686e-08, "epoch": 0.9147928489141102, "percentage": 91.47, "elapsed_time": "3:37:18", "remaining_time": "0:20:16"}
+ {"current_steps": 1350, "total_steps": 1465, "loss": 0.3461, "accuracy": 0.856249988079071, "lr": 8.801213960546281e-08, "epoch": 0.9216196612194394, "percentage": 92.15, "elapsed_time": "3:38:53", "remaining_time": "0:18:38"}
+ {"current_steps": 1360, "total_steps": 1465, "loss": 0.4065, "accuracy": 0.8218750357627869, "lr": 8.042488619119878e-08, "epoch": 0.9284464735247685, "percentage": 92.83, "elapsed_time": "3:40:28", "remaining_time": "0:17:01"}
+ {"current_steps": 1370, "total_steps": 1465, "loss": 0.3465, "accuracy": 0.8531249761581421, "lr": 7.283763277693475e-08, "epoch": 0.9352732858300977, "percentage": 93.52, "elapsed_time": "3:42:03", "remaining_time": "0:15:23"}
+ {"current_steps": 1380, "total_steps": 1465, "loss": 0.3218, "accuracy": 0.887499988079071, "lr": 6.525037936267071e-08, "epoch": 0.9421000981354269, "percentage": 94.2, "elapsed_time": "3:43:40", "remaining_time": "0:13:46"}
+ {"current_steps": 1390, "total_steps": 1465, "loss": 0.3864, "accuracy": 0.8312499523162842, "lr": 5.766312594840667e-08, "epoch": 0.948926910440756, "percentage": 94.88, "elapsed_time": "3:45:16", "remaining_time": "0:12:09"}
+ {"current_steps": 1400, "total_steps": 1465, "loss": 0.3447, "accuracy": 0.8531250357627869, "lr": 5.007587253414264e-08, "epoch": 0.9557537227460853, "percentage": 95.56, "elapsed_time": "3:46:51", "remaining_time": "0:10:31"}
+ {"current_steps": 1410, "total_steps": 1465, "loss": 0.3718, "accuracy": 0.831250011920929, "lr": 4.2488619119878606e-08, "epoch": 0.9625805350514144, "percentage": 96.25, "elapsed_time": "3:48:29", "remaining_time": "0:08:54"}
+ {"current_steps": 1420, "total_steps": 1465, "loss": 0.3678, "accuracy": 0.823437511920929, "lr": 3.4901365705614566e-08, "epoch": 0.9694073473567436, "percentage": 96.93, "elapsed_time": "3:50:05", "remaining_time": "0:07:17"}
+ {"current_steps": 1430, "total_steps": 1465, "loss": 0.3681, "accuracy": 0.8484375476837158, "lr": 2.731411229135053e-08, "epoch": 0.9762341596620728, "percentage": 97.61, "elapsed_time": "3:51:40", "remaining_time": "0:05:40"}
+ {"current_steps": 1440, "total_steps": 1465, "loss": 0.3734, "accuracy": 0.8500000238418579, "lr": 1.9726858877086493e-08, "epoch": 0.9830609719674019, "percentage": 98.29, "elapsed_time": "3:53:16", "remaining_time": "0:04:02"}
+ {"current_steps": 1450, "total_steps": 1465, "loss": 0.3924, "accuracy": 0.8328125476837158, "lr": 1.2139605462822458e-08, "epoch": 0.9898877842727312, "percentage": 98.98, "elapsed_time": "3:54:52", "remaining_time": "0:02:25"}
+ {"current_steps": 1460, "total_steps": 1465, "loss": 0.3379, "accuracy": 0.8734375238418579, "lr": 4.552352048558422e-09, "epoch": 0.9967145965780604, "percentage": 99.66, "elapsed_time": "3:56:28", "remaining_time": "0:00:48"}
+ {"current_steps": 1465, "total_steps": 1465, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "3:57:37", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2233 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 1465,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {"epoch": 0.00682681230532918, "grad_norm": 20.491548678692148, "learning_rate": 6.122448979591837e-08, "logits/chosen": 0.03672148287296295, "logits/rejected": 0.041521187871694565, "logps/chosen": -191.74862670898438, "logps/rejected": -189.4052276611328, "loss": 0.6921, "rewards/accuracies": 0.41874998807907104, "rewards/chosen": 0.0014678842853754759, "rewards/margins": 0.0024292597081512213, "rewards/rejected": -0.0009613755391910672, "step": 10},
+ {"epoch": 0.01365362461065836, "grad_norm": 21.860852469835415, "learning_rate": 1.2925170068027211e-07, "logits/chosen": 0.04523754119873047, "logits/rejected": 0.05510401353240013, "logps/chosen": -187.8703155517578, "logps/rejected": -187.6009979248047, "loss": 0.6937, "rewards/accuracies": 0.4859375059604645, "rewards/chosen": -0.0003124059294350445, "rewards/margins": -0.0007655444787815213, "rewards/rejected": 0.0004531386948656291, "step": 20},
+ {"epoch": 0.02048043691598754, "grad_norm": 20.278529512570657, "learning_rate": 1.9727891156462583e-07, "logits/chosen": 0.020983930677175522, "logits/rejected": 0.04532231390476227, "logps/chosen": -185.85728454589844, "logps/rejected": -188.9866180419922, "loss": 0.6936, "rewards/accuracies": 0.46875, "rewards/chosen": 0.00064073596149683, "rewards/margins": -0.0005829028668813407, "rewards/rejected": 0.001223638653755188, "step": 30},
+ {"epoch": 0.02730724922131672, "grad_norm": 19.626379046619967, "learning_rate": 2.653061224489796e-07, "logits/chosen": 0.03043345920741558, "logits/rejected": 0.032446593046188354, "logps/chosen": -193.6338653564453, "logps/rejected": -190.4232635498047, "loss": 0.6913, "rewards/accuracies": 0.53125, "rewards/chosen": 0.002521326532587409, "rewards/margins": 0.004052319563925266, "rewards/rejected": -0.0015309930313378572, "step": 40},
+ {"epoch": 0.0341340615266459, "grad_norm": 21.08295374738999, "learning_rate": 3.333333333333333e-07, "logits/chosen": 0.04947035759687424, "logits/rejected": 0.06372452527284622, "logps/chosen": -188.39315795898438, "logps/rejected": -190.05992126464844, "loss": 0.6942, "rewards/accuracies": 0.46406251192092896, "rewards/chosen": 0.0021625806111842394, "rewards/margins": -0.0017312343697994947, "rewards/rejected": 0.003893814980983734, "step": 50},
+ {"epoch": 0.04096087383197508, "grad_norm": 20.25039554823623, "learning_rate": 4.0136054421768705e-07, "logits/chosen": 0.053825099021196365, "logits/rejected": 0.0521962009370327, "logps/chosen": -189.28480529785156, "logps/rejected": -184.31430053710938, "loss": 0.6937, "rewards/accuracies": 0.5015624761581421, "rewards/chosen": 0.004196351859718561, "rewards/margins": -0.0006979627651162446, "rewards/rejected": 0.0048943147994577885, "step": 60},
+ {"epoch": 0.04778768613730426, "grad_norm": 22.505298366939336, "learning_rate": 4.693877551020408e-07, "logits/chosen": 0.03855639323592186, "logits/rejected": 0.041457682847976685, "logps/chosen": -189.49111938476562, "logps/rejected": -190.42034912109375, "loss": 0.6933, "rewards/accuracies": 0.4937499761581421, "rewards/chosen": 0.008006598800420761, "rewards/margins": 4.7756126150488853e-05, "rewards/rejected": 0.007958842441439629, "step": 70},
+ {"epoch": 0.05461449844263344, "grad_norm": 19.99809543741437, "learning_rate": 5.374149659863945e-07, "logits/chosen": 0.026321567595005035, "logits/rejected": 0.013571225106716156, "logps/chosen": -189.8534393310547, "logps/rejected": -187.626708984375, "loss": 0.6878, "rewards/accuracies": 0.6031249761581421, "rewards/chosen": 0.018732454627752304, "rewards/margins": 0.011271494440734386, "rewards/rejected": 0.007460957858711481, "step": 80},
+ {"epoch": 0.06144131074796262, "grad_norm": 22.176568391543768, "learning_rate": 6.054421768707482e-07, "logits/chosen": 0.020383019000291824, "logits/rejected": 0.02592673897743225, "logps/chosen": -186.662841796875, "logps/rejected": -189.3004608154297, "loss": 0.6876, "rewards/accuracies": 0.582812488079071, "rewards/chosen": 0.027650414034724236, "rewards/margins": 0.011809633113443851, "rewards/rejected": 0.01584078185260296, "step": 90},
+ {"epoch": 0.0682681230532918, "grad_norm": 20.53234701755388, "learning_rate": 6.734693877551019e-07, "logits/chosen": 0.02966993674635887, "logits/rejected": 0.05219441279768944, "logps/chosen": -190.25782775878906, "logps/rejected": -189.80935668945312, "loss": 0.6858, "rewards/accuracies": 0.5718749761581421, "rewards/chosen": 0.040990687906742096, "rewards/margins": 0.01583397202193737, "rewards/rejected": 0.025156717747449875, "step": 100},
+ {"epoch": 0.07509493535862098, "grad_norm": 21.19602898096358, "learning_rate": 7.414965986394558e-07, "logits/chosen": -0.007384412921965122, "logits/rejected": -0.016086794435977936, "logps/chosen": -189.52395629882812, "logps/rejected": -192.64816284179688, "loss": 0.6817, "rewards/accuracies": 0.625, "rewards/chosen": 0.05572628974914551, "rewards/margins": 0.024477079510688782, "rewards/rejected": 0.031249215826392174, "step": 110},
+ {"epoch": 0.08192174766395016, "grad_norm": 20.08862529877448, "learning_rate": 8.095238095238095e-07, "logits/chosen": -0.04889947175979614, "logits/rejected": -0.049361489713191986, "logps/chosen": -197.39492797851562, "logps/rejected": -192.8791046142578, "loss": 0.6828, "rewards/accuracies": 0.5843750238418579, "rewards/chosen": 0.059998854994773865, "rewards/margins": 0.023517701774835587, "rewards/rejected": 0.03648114949464798, "step": 120},
+ {"epoch": 0.08874855996927934, "grad_norm": 19.78186965312465, "learning_rate": 8.775510204081632e-07, "logits/chosen": -0.022162066772580147, "logits/rejected": -0.02603471651673317, "logps/chosen": -192.2538604736328, "logps/rejected": -190.6973876953125, "loss": 0.6782, "rewards/accuracies": 0.651562511920929, "rewards/chosen": 0.07047584652900696, "rewards/margins": 0.03453099727630615, "rewards/rejected": 0.035944852977991104, "step": 130},
+ {"epoch": 0.09557537227460852, "grad_norm": 21.72668562860521, "learning_rate": 9.45578231292517e-07, "logits/chosen": -0.028122998774051666, "logits/rejected": -0.0023567965254187584, "logps/chosen": -193.58602905273438, "logps/rejected": -189.49517822265625, "loss": 0.6721, "rewards/accuracies": 0.6499999761581421, "rewards/chosen": 0.08927410840988159, "rewards/margins": 0.04811044782400131, "rewards/rejected": 0.04116365686058998, "step": 140},
+ {"epoch": 0.1024021845799377, "grad_norm": 20.630914226397604, "learning_rate": 9.98482549317147e-07, "logits/chosen": -0.07732997089624405, "logits/rejected": -0.08366119861602783, "logps/chosen": -203.80441284179688, "logps/rejected": -202.51812744140625, "loss": 0.666, "rewards/accuracies": 0.6500000357627869, "rewards/chosen": 0.10389578342437744, "rewards/margins": 0.06255247443914413, "rewards/rejected": 0.04134330898523331, "step": 150},
+ {"epoch": 0.10922899688526688, "grad_norm": 20.25669433495337, "learning_rate": 9.908952959028832e-07, "logits/chosen": -0.09441889822483063, "logits/rejected": -0.08870529383420944, "logps/chosen": -185.63307189941406, "logps/rejected": -186.53253173828125, "loss": 0.6654, "rewards/accuracies": 0.6500000357627869, "rewards/chosen": 0.08515263348817825, "rewards/margins": 0.0660884901881218, "rewards/rejected": 0.019064147025346756, "step": 160},
+ {"epoch": 0.11605580919059606, "grad_norm": 20.384593980794733, "learning_rate": 9.833080424886191e-07, "logits/chosen": -0.08715031296014786, "logits/rejected": -0.05636933073401451, "logps/chosen": -188.3374481201172, "logps/rejected": -190.37437438964844, "loss": 0.659, "rewards/accuracies": 0.6343749761581421, "rewards/chosen": 0.04690036177635193, "rewards/margins": 0.08634677529335022, "rewards/rejected": -0.03944641351699829, "step": 170},
+ {"epoch": 0.12288262149592524, "grad_norm": 21.86056528276187, "learning_rate": 9.75720789074355e-07, "logits/chosen": -0.07912790030241013, "logits/rejected": -0.07271625846624374, "logps/chosen": -197.11959838867188, "logps/rejected": -197.41287231445312, "loss": 0.6528, "rewards/accuracies": 0.6749999523162842, "rewards/chosen": 0.04622086510062218, "rewards/margins": 0.10496747493743896, "rewards/rejected": -0.058746613562107086, "step": 180},
+ {"epoch": 0.12970943380125444, "grad_norm": 22.24802422589698, "learning_rate": 9.68133535660091e-07, "logits/chosen": -0.07506565004587173, "logits/rejected": -0.05108420550823212, "logps/chosen": -190.35340881347656, "logps/rejected": -195.009521484375, "loss": 0.6441, "rewards/accuracies": 0.6812500357627869, "rewards/chosen": 0.052541881799697876, "rewards/margins": 0.12386594712734222, "rewards/rejected": -0.07132406532764435, "step": 190},
+ {"epoch": 0.1365362461065836, "grad_norm": 22.419822765649933, "learning_rate": 9.60546282245827e-07, "logits/chosen": -0.11874101310968399, "logits/rejected": -0.08336825668811798, "logps/chosen": -193.62611389160156, "logps/rejected": -196.01084899902344, "loss": 0.6249, "rewards/accuracies": 0.7046875357627869, "rewards/chosen": 0.03949081152677536, "rewards/margins": 0.17370560765266418, "rewards/rejected": -0.13421478867530823, "step": 200},
+ {"epoch": 0.1433630584119128, "grad_norm": 22.915739502006815, "learning_rate": 9.52959028831563e-07, "logits/chosen": -0.17365601658821106, "logits/rejected": -0.15520283579826355, "logps/chosen": -203.1890869140625, "logps/rejected": -200.14974975585938, "loss": 0.6287, "rewards/accuracies": 0.6687500476837158, "rewards/chosen": -0.01979774236679077, "rewards/margins": 0.18479280173778534, "rewards/rejected": -0.2045905441045761, "step": 210},
+ {"epoch": 0.15018987071724196, "grad_norm": 20.769969852017695, "learning_rate": 9.453717754172988e-07, "logits/chosen": -0.1847243756055832, "logits/rejected": -0.15192236006259918, "logps/chosen": -198.33010864257812, "logps/rejected": -200.56228637695312, "loss": 0.6015, "rewards/accuracies": 0.7109375, "rewards/chosen": -0.03269830346107483, "rewards/margins": 0.25339096784591675, "rewards/rejected": -0.2860892415046692, "step": 220},
+ {"epoch": 0.15701668302257116, "grad_norm": 21.597574913870996, "learning_rate": 9.377845220030348e-07, "logits/chosen": -0.21274694800376892, "logits/rejected": -0.19206659495830536, "logps/chosen": -197.59228515625, "logps/rejected": -200.42283630371094, "loss": 0.611, "rewards/accuracies": 0.6734374761581421, "rewards/chosen": -0.09015801548957825, "rewards/margins": 0.24926723539829254, "rewards/rejected": -0.3394252359867096, "step": 230},
+ {"epoch": 0.16384349532790032, "grad_norm": 24.09497342960952, "learning_rate": 9.301972685887707e-07, "logits/chosen": -0.2293986827135086, "logits/rejected": -0.19997453689575195, "logps/chosen": -191.1751251220703, "logps/rejected": -196.63511657714844, "loss": 0.6125, "rewards/accuracies": 0.690625011920929, "rewards/chosen": -0.15094764530658722, "rewards/margins": 0.24523335695266724, "rewards/rejected": -0.39618098735809326, "step": 240},
+ {"epoch": 0.17067030763322952, "grad_norm": 22.186402685803138, "learning_rate": 9.226100151745068e-07, "logits/chosen": -0.23599499464035034, "logits/rejected": -0.20987126231193542, "logps/chosen": -191.61639404296875, "logps/rejected": -197.80091857910156, "loss": 0.6205, "rewards/accuracies": 0.6546875238418579, "rewards/chosen": -0.22373469173908234, "rewards/margins": 0.2635762691497803, "rewards/rejected": -0.4873109459877014, "step": 250},
+ {"epoch": 0.17749711993855868, "grad_norm": 23.30196457741843, "learning_rate": 9.150227617602428e-07, "logits/chosen": -0.2195354700088501, "logits/rejected": -0.19019638001918793, "logps/chosen": -190.50746154785156, "logps/rejected": -195.74331665039062, "loss": 0.6056, "rewards/accuracies": 0.7046875357627869, "rewards/chosen": -0.2523514926433563, "rewards/margins": 0.29894089698791504, "rewards/rejected": -0.5512923002243042, "step": 260},
+ {"epoch": 0.18432393224388788, "grad_norm": 23.437160399579792, "learning_rate": 9.074355083459787e-07, "logits/chosen": -0.2144363671541214, "logits/rejected": -0.19538246095180511, "logps/chosen": -194.883056640625, "logps/rejected": -202.83575439453125, "loss": 0.595, "rewards/accuracies": 0.7078125476837158, "rewards/chosen": -0.27382633090019226, "rewards/margins": 0.3095867931842804, "rewards/rejected": -0.5834130644798279, "step": 270},
+ {"epoch": 0.19115074454921704, "grad_norm": 23.67928529051871, "learning_rate": 8.998482549317147e-07, "logits/chosen": -0.2671777606010437, "logits/rejected": -0.23835715651512146, "logps/chosen": -189.7034912109375, "logps/rejected": -194.55117797851562, "loss": 0.589, "rewards/accuracies": 0.7000000476837158, "rewards/chosen": -0.2815781235694885, "rewards/margins": 0.34006255865097046, "rewards/rejected": -0.621640682220459, "step": 280},
+ {"epoch": 0.19797755685454624, "grad_norm": 26.3785919721159, "learning_rate": 8.922610015174506e-07, "logits/chosen": -0.2851921319961548, "logits/rejected": -0.2668570280075073, "logps/chosen": -202.77801513671875, "logps/rejected": -207.8894805908203, "loss": 0.59, "rewards/accuracies": 0.7046875357627869, "rewards/chosen": -0.33676964044570923, "rewards/margins": 0.35969871282577515, "rewards/rejected": -0.6964683532714844, "step": 290},
+ {"epoch": 0.2048043691598754, "grad_norm": 23.715391013722297, "learning_rate": 8.846737481031866e-07, "logits/chosen": -0.2776036262512207, "logits/rejected": -0.24332435429096222, "logps/chosen": -201.10296630859375, "logps/rejected": -203.72195434570312, "loss": 0.6111, "rewards/accuracies": 0.715624988079071, "rewards/chosen": -0.3781723380088806, "rewards/margins": 0.3227519989013672, "rewards/rejected": -0.700924277305603, "step": 300},
+ {"epoch": 0.2116311814652046, "grad_norm": 21.57268816738927, "learning_rate": 8.770864946889226e-07, "logits/chosen": -0.29242080450057983, "logits/rejected": -0.2669425308704376, "logps/chosen": -204.4817352294922, "logps/rejected": -214.0943603515625, "loss": 0.5794, "rewards/accuracies": 0.7265625, "rewards/chosen": -0.3647349178791046, "rewards/margins": 0.4395143985748291, "rewards/rejected": -0.8042493462562561, "step": 310},
+ {"epoch": 0.21845799377053376, "grad_norm": 25.227342019618998, "learning_rate": 8.694992412746586e-07, "logits/chosen": -0.27386438846588135, "logits/rejected": -0.2711098790168762, "logps/chosen": -198.40101623535156, "logps/rejected": -204.6220703125, "loss": 0.5727, "rewards/accuracies": 0.7281250357627869, "rewards/chosen": -0.3862449824810028, "rewards/margins": 0.41143903136253357, "rewards/rejected": -0.7976840734481812, "step": 320},
+ {"epoch": 0.22528480607586296, "grad_norm": 24.00522520700325, "learning_rate": 8.619119878603945e-07, "logits/chosen": -0.3334537744522095, "logits/rejected": -0.3187546730041504, "logps/chosen": -208.01986694335938, "logps/rejected": -212.91488647460938, "loss": 0.5913, "rewards/accuracies": 0.6703125238418579, "rewards/chosen": -0.4798099398612976, "rewards/margins": 0.37955817580223083, "rewards/rejected": -0.8593681454658508, "step": 330},
+ {"epoch": 0.23211161838119213, "grad_norm": 23.49360024665317, "learning_rate": 8.543247344461305e-07, "logits/chosen": -0.30438894033432007, "logits/rejected": -0.28073978424072266, "logps/chosen": -203.7110595703125, "logps/rejected": -211.83615112304688, "loss": 0.56, "rewards/accuracies": 0.7328125238418579, "rewards/chosen": -0.3902357518672943, "rewards/margins": 0.5086088180541992, "rewards/rejected": -0.8988445401191711, "step": 340},
+ {"epoch": 0.23893843068652132, "grad_norm": 23.086500001623612, "learning_rate": 8.467374810318663e-07, "logits/chosen": -0.3257724940776825, "logits/rejected": -0.2853447198867798, "logps/chosen": -204.09765625, "logps/rejected": -212.38494873046875, "loss": 0.5515, "rewards/accuracies": 0.706250011920929, "rewards/chosen": -0.4673992991447449, "rewards/margins": 0.5267953872680664, "rewards/rejected": -0.9941946864128113, "step": 350},
+ {"epoch": 0.24576524299185049, "grad_norm": 24.60129579583855, "learning_rate": 8.391502276176023e-07, "logits/chosen": -0.3029869794845581, "logits/rejected": -0.2718327045440674, "logps/chosen": -196.5174560546875, "logps/rejected": -204.4929656982422, "loss": 0.5809, "rewards/accuracies": 0.7046875357627869, "rewards/chosen": -0.4800136089324951, "rewards/margins": 0.43177759647369385, "rewards/rejected": -0.9117912650108337, "step": 360},
+ {"epoch": 0.25259205529717965, "grad_norm": 23.03353178121409, "learning_rate": 8.315629742033384e-07, "logits/chosen": -0.28175657987594604, "logits/rejected": -0.2525416612625122, "logps/chosen": -197.58517456054688, "logps/rejected": -210.83853149414062, "loss": 0.5675, "rewards/accuracies": 0.7234375476837158, "rewards/chosen": -0.5489044785499573, "rewards/margins": 0.4759043753147125, "rewards/rejected": -1.0248088836669922, "step": 370},
+ {"epoch": 0.2594188676025089, "grad_norm": 21.702116754195792, "learning_rate": 8.239757207890743e-07, "logits/chosen": -0.3090224266052246, "logits/rejected": -0.2872709333896637, "logps/chosen": -204.044921875, "logps/rejected": -214.3769989013672, "loss": 0.5414, "rewards/accuracies": 0.7265625, "rewards/chosen": -0.501671552658081, "rewards/margins": 0.5782625675201416, "rewards/rejected": -1.0799341201782227, "step": 380},
+ {"epoch": 0.26624567990783804, "grad_norm": 22.690534272455945, "learning_rate": 8.163884673748103e-07, "logits/chosen": -0.2652078866958618, "logits/rejected": -0.22916777431964874, "logps/chosen": -206.28855895996094, "logps/rejected": -217.3023681640625, "loss": 0.532, "rewards/accuracies": 0.7343750596046448, "rewards/chosen": -0.47486239671707153, "rewards/margins": 0.6135950684547424, "rewards/rejected": -1.088457465171814, "step": 390},
+ {"epoch": 0.2730724922131672, "grad_norm": 24.587498727216616, "learning_rate": 8.088012139605462e-07, "logits/chosen": -0.28489071130752563, "logits/rejected": -0.23875750601291656, "logps/chosen": -202.77565002441406, "logps/rejected": -216.6030731201172, "loss": 0.5272, "rewards/accuracies": 0.737500011920929, "rewards/chosen": -0.5735213756561279, "rewards/margins": 0.6210550665855408, "rewards/rejected": -1.194576382637024, "step": 400},
+ {"epoch": 0.2798993045184964, "grad_norm": 24.707605897401567, "learning_rate": 8.012139605462822e-07, "logits/chosen": -0.3593894839286804, "logits/rejected": -0.3138624429702759, "logps/chosen": -202.06204223632812, "logps/rejected": -208.73065185546875, "loss": 0.5575, "rewards/accuracies": 0.721875011920929, "rewards/chosen": -0.604642391204834, "rewards/margins": 0.5522481203079224, "rewards/rejected": -1.156890630722046, "step": 410},
+ {"epoch": 0.2867261168238256, "grad_norm": 24.754070000277498, "learning_rate": 7.936267071320181e-07, "logits/chosen": -0.3502323627471924, "logits/rejected": -0.3173756003379822, "logps/chosen": -207.6633758544922, "logps/rejected": -216.3917236328125, "loss": 0.5265, "rewards/accuracies": 0.7484375238418579, "rewards/chosen": -0.6551162004470825, "rewards/margins": 0.6169639229774475, "rewards/rejected": -1.2720801830291748, "step": 420},
+ {"epoch": 0.29355292912915476, "grad_norm": 23.564476771066985, "learning_rate": 7.860394537177542e-07, "logits/chosen": -0.3500007092952728, "logits/rejected": -0.32545575499534607, "logps/chosen": -211.29928588867188, "logps/rejected": -227.12037658691406, "loss": 0.5223, "rewards/accuracies": 0.7421875596046448, "rewards/chosen": -0.7528213262557983, "rewards/margins": 0.739406943321228, "rewards/rejected": -1.492228388786316, "step": 430},
+ {"epoch": 0.3003797414344839, "grad_norm": 21.091018091079327, "learning_rate": 7.784522003034901e-07, "logits/chosen": -0.35516998171806335, "logits/rejected": -0.3074837327003479, "logps/chosen": -203.1188507080078, "logps/rejected": -212.15496826171875, "loss": 0.5055, "rewards/accuracies": 0.7765625715255737, "rewards/chosen": -0.6801650524139404, "rewards/margins": 0.7159599661827087, "rewards/rejected": -1.396125078201294, "step": 440},
+ {"epoch": 0.3072065537398131, "grad_norm": 30.178688833532316, "learning_rate": 7.708649468892261e-07, "logits/chosen": -0.3771928548812866, "logits/rejected": -0.34754854440689087, "logps/chosen": -208.95216369628906, "logps/rejected": -225.38938903808594, "loss": 0.5226, "rewards/accuracies": 0.746874988079071, "rewards/chosen": -0.7856850624084473, "rewards/margins": 0.6984450817108154, "rewards/rejected": -1.4841301441192627, "step": 450},
+ {"epoch": 0.3140333660451423, "grad_norm": 22.73508892423378, "learning_rate": 7.632776934749621e-07, "logits/chosen": -0.40090760588645935, "logits/rejected": -0.3806273937225342, "logps/chosen": -208.29766845703125, "logps/rejected": -223.73020935058594, "loss": 0.5013, "rewards/accuracies": 0.765625, "rewards/chosen": -0.7431963086128235, "rewards/margins": 0.8224382400512695, "rewards/rejected": -1.5656344890594482, "step": 460},
+ {"epoch": 0.3208601783504715, "grad_norm": 24.65367082247547, "learning_rate": 7.55690440060698e-07, "logits/chosen": -0.41392359137535095, "logits/rejected": -0.3990693688392639, "logps/chosen": -211.69845581054688, "logps/rejected": -222.681884765625, "loss": 0.4896, "rewards/accuracies": 0.7671874761581421, "rewards/chosen": -0.7812504768371582, "rewards/margins": 0.8228715062141418, "rewards/rejected": -1.6041220426559448, "step": 470},
+ {"epoch": 0.32768699065580065, "grad_norm": 26.060565630616303, "learning_rate": 7.481031866464339e-07, "logits/chosen": -0.4470677673816681, "logits/rejected": -0.4043146073818207, "logps/chosen": -201.87158203125, "logps/rejected": -216.65240478515625, "loss": 0.5178, "rewards/accuracies": 0.7406250238418579, "rewards/chosen": -0.8349807858467102, "rewards/margins": 0.7298619151115417, "rewards/rejected": -1.564842700958252, "step": 480},
+ {"epoch": 0.3345138029611298, "grad_norm": 24.867787006387463, "learning_rate": 7.405159332321699e-07, "logits/chosen": -0.4602758288383484, "logits/rejected": -0.4031441807746887, "logps/chosen": -215.20541381835938, "logps/rejected": -234.6583251953125, "loss": 0.5155, "rewards/accuracies": 0.7484375238418579, "rewards/chosen": -0.9265861511230469, "rewards/margins": 0.8055697679519653, "rewards/rejected": -1.7321559190750122, "step": 490},
+ {"epoch": 0.34134061526645904, "grad_norm": 32.86790243336268, "learning_rate": 7.329286798179059e-07, "logits/chosen": -0.4144153594970703, "logits/rejected": -0.3892706036567688, "logps/chosen": -216.45887756347656, "logps/rejected": -225.97056579589844, "loss": 0.5274, "rewards/accuracies": 0.7328125238418579, "rewards/chosen": -0.9314414262771606, "rewards/margins": 0.7752954363822937, "rewards/rejected": -1.7067368030548096, "step": 500},
+ {"epoch": 0.3481674275717882, "grad_norm": 29.0406209714796, "learning_rate": 7.253414264036418e-07, "logits/chosen": -0.4518946707248688, "logits/rejected": -0.4360005855560303, "logps/chosen": -210.40875244140625, "logps/rejected": -227.6586456298828, "loss": 0.4918, "rewards/accuracies": 0.7640624642372131, "rewards/chosen": -0.7644888162612915, "rewards/margins": 0.8264600038528442, "rewards/rejected": -1.5909489393234253, "step": 510},
+ {"epoch": 0.35499423987711737, "grad_norm": 29.792037648827193, "learning_rate": 7.177541729893778e-07, "logits/chosen": -0.46055272221565247, "logits/rejected": -0.41955289244651794, "logps/chosen": -203.9451904296875, "logps/rejected": -225.48402404785156, "loss": 0.5137, "rewards/accuracies": 0.75, "rewards/chosen": -0.7920026779174805, "rewards/margins": 0.7750235795974731, "rewards/rejected": -1.5670262575149536, "step": 520},
+ {"epoch": 0.36182105218244653, "grad_norm": 28.48324275582042, "learning_rate": 7.101669195751137e-07, "logits/chosen": -0.44266417622566223, "logits/rejected": -0.4136849045753479, "logps/chosen": -217.11045837402344, "logps/rejected": -232.384521484375, "loss": 0.5059, "rewards/accuracies": 0.7500000596046448, "rewards/chosen": -0.9079422950744629, "rewards/margins": 0.8452929258346558, "rewards/rejected": -1.7532353401184082, "step": 530},
+ {"epoch": 0.36864786448777576, "grad_norm": 24.346858846505146, "learning_rate": 7.025796661608497e-07, "logits/chosen": -0.4453073740005493, "logits/rejected": -0.39773428440093994, "logps/chosen": -199.64686584472656, "logps/rejected": -217.36294555664062, "loss": 0.5282, "rewards/accuracies": 0.745312511920929, "rewards/chosen": -0.7881425023078918, "rewards/margins": 0.745051383972168, "rewards/rejected": -1.533193826675415, "step": 540},
+ {"epoch": 0.3754746767931049, "grad_norm": 23.88017645464549, "learning_rate": 6.949924127465857e-07, "logits/chosen": -0.4227825701236725, "logits/rejected": -0.3899107873439789, "logps/chosen": -218.3785400390625, "logps/rejected": -230.14222717285156, "loss": 0.5021, "rewards/accuracies": 0.7703125476837158, "rewards/chosen": -0.704402506351471, "rewards/margins": 0.8275265693664551, "rewards/rejected": -1.5319291353225708, "step": 550},
+ {"epoch": 0.3823014890984341, "grad_norm": 23.672046628232867, "learning_rate": 6.874051593323217e-07, "logits/chosen": -0.42757853865623474, "logits/rejected": -0.394180566072464, "logps/chosen": -208.079345703125, "logps/rejected": -228.22598266601562, "loss": 0.4667, "rewards/accuracies": 0.770312488079071, "rewards/chosen": -0.8188365697860718, "rewards/margins": 0.9388971328735352, "rewards/rejected": -1.7577338218688965, "step": 560},
+ {"epoch": 0.38912830140376325, "grad_norm": 27.539677366232738, "learning_rate": 6.798179059180577e-07, "logits/chosen": -0.4404156506061554, "logits/rejected": -0.3975413739681244, "logps/chosen": -208.03125, "logps/rejected": -224.20956420898438, "loss": 0.5004, "rewards/accuracies": 0.7593750357627869, "rewards/chosen": -0.8374041318893433, "rewards/margins": 0.7886074781417847, "rewards/rejected": -1.6260114908218384, "step": 570},
+ {"epoch": 0.3959551137090925, "grad_norm": 25.29375987198196, "learning_rate": 6.722306525037936e-07, "logits/chosen": -0.4404994249343872, "logits/rejected": -0.40123340487480164, "logps/chosen": -213.8634490966797, "logps/rejected": -234.7059326171875, "loss": 0.497, "rewards/accuracies": 0.768750011920929, "rewards/chosen": -0.8731653094291687, "rewards/margins": 0.9025252461433411, "rewards/rejected": -1.7756905555725098, "step": 580},
+ {"epoch": 0.40278192601442164, "grad_norm": 23.196272876570017, "learning_rate": 6.646433990895296e-07, "logits/chosen": -0.42805609107017517, "logits/rejected": -0.3933747410774231, "logps/chosen": -210.49766540527344, "logps/rejected": -230.8019256591797, "loss": 0.472, "rewards/accuracies": 0.7718750238418579, "rewards/chosen": -0.9004274606704712, "rewards/margins": 0.9456923604011536, "rewards/rejected": -1.84611976146698, "step": 590},
+ {"epoch": 0.4096087383197508, "grad_norm": 26.112729497646914, "learning_rate": 6.570561456752655e-07, "logits/chosen": -0.419676810503006, "logits/rejected": -0.3932231068611145, "logps/chosen": -212.6820831298828, "logps/rejected": -230.1705322265625, "loss": 0.4551, "rewards/accuracies": 0.776562511920929, "rewards/chosen": -0.9198075532913208, "rewards/margins": 1.0145457983016968, "rewards/rejected": -1.9343533515930176, "step": 600},
+ {"epoch": 0.41643555062508, "grad_norm": 26.539025702964505, "learning_rate": 6.494688922610015e-07, "logits/chosen": -0.47971057891845703, "logits/rejected": -0.43692541122436523, "logps/chosen": -212.31594848632812, "logps/rejected": -234.5380859375, "loss": 0.4563, "rewards/accuracies": 0.770312488079071, "rewards/chosen": -1.0747839212417603, "rewards/margins": 1.078429937362671, "rewards/rejected": -2.1532137393951416, "step": 610},
+ {"epoch": 0.4232623629304092, "grad_norm": 26.2859842178028, "learning_rate": 6.418816388467374e-07, "logits/chosen": -0.4652007818222046, "logits/rejected": -0.4464990496635437, "logps/chosen": -212.9930419921875, "logps/rejected": -230.19207763671875, "loss": 0.4778, "rewards/accuracies": 0.7906250357627869, "rewards/chosen": -1.1166890859603882, "rewards/margins": 0.9617180228233337, "rewards/rejected": -2.0784072875976562, "step": 620},
+ {"epoch": 0.43008917523573836, "grad_norm": 27.943160005363282, "learning_rate": 6.342943854324734e-07, "logits/chosen": -0.507358968257904, "logits/rejected": -0.46083295345306396, "logps/chosen": -211.0389404296875, "logps/rejected": -234.06576538085938, "loss": 0.4689, "rewards/accuracies": 0.78125, "rewards/chosen": -1.125166654586792, "rewards/margins": 1.1086124181747437, "rewards/rejected": -2.233778953552246, "step": 630},
+ {"epoch": 0.43691598754106753, "grad_norm": 27.031702699703523, "learning_rate": 6.267071320182093e-07, "logits/chosen": -0.5109987854957581, "logits/rejected": -0.4727884531021118, "logps/chosen": -216.13302612304688, "logps/rejected": -241.88287353515625, "loss": 0.4635, "rewards/accuracies": 0.7875000238418579, "rewards/chosen": -1.1550945043563843, "rewards/margins": 1.1085400581359863, "rewards/rejected": -2.263634443283081, "step": 640},
+ {"epoch": 0.4437427998463967, "grad_norm": 26.49416191451856, "learning_rate": 6.191198786039453e-07, "logits/chosen": -0.5049822330474854, "logits/rejected": -0.46804797649383545, "logps/chosen": -220.15802001953125, "logps/rejected": -241.11386108398438, "loss": 0.4646, "rewards/accuracies": 0.770312488079071, "rewards/chosen": -1.1580806970596313, "rewards/margins": 1.064436435699463, "rewards/rejected": -2.222517490386963, "step": 650},
+ {"epoch": 0.4505696121517259, "grad_norm": 28.052993928802096, "learning_rate": 6.115326251896813e-07, "logits/chosen": -0.5224714875221252, "logits/rejected": -0.496852308511734, "logps/chosen": -217.48992919921875, "logps/rejected": -234.48318481445312, "loss": 0.5188, "rewards/accuracies": 0.7671874761581421, "rewards/chosen": -1.1128088235855103, "rewards/margins": 0.9438337087631226, "rewards/rejected": -2.056642532348633, "step": 660},
+ {"epoch": 0.4573964244570551, "grad_norm": 32.11947138128127, "learning_rate": 6.039453717754173e-07, "logits/chosen": -0.4993141293525696, "logits/rejected": -0.4682856798171997, "logps/chosen": -206.40176391601562, "logps/rejected": -231.08042907714844, "loss": 0.4953, "rewards/accuracies": 0.7437499761581421, "rewards/chosen": -1.0732593536376953, "rewards/margins": 1.1166470050811768, "rewards/rejected": -2.189906358718872, "step": 670},
+ {"epoch": 0.46422323676238425, "grad_norm": 24.595239877463356, "learning_rate": 5.963581183611533e-07, "logits/chosen": -0.5414324998855591, "logits/rejected": -0.5145028233528137, "logps/chosen": -219.66567993164062, "logps/rejected": -236.0765380859375, "loss": 0.4608, "rewards/accuracies": 0.7781250476837158, "rewards/chosen": -0.9715930819511414, "rewards/margins": 1.0554088354110718, "rewards/rejected": -2.0270018577575684, "step": 680},
+ {"epoch": 0.47105004906771347, "grad_norm": 27.819824043736283, "learning_rate": 5.887708649468892e-07, "logits/chosen": -0.482106477022171, "logits/rejected": -0.43574321269989014, "logps/chosen": -211.92596435546875, "logps/rejected": -234.6639862060547, "loss": 0.4352, "rewards/accuracies": 0.792187511920929, "rewards/chosen": -0.9926649332046509, "rewards/margins": 1.141036033630371, "rewards/rejected": -2.1337008476257324, "step": 690},
+ {"epoch": 0.47787686137304264, "grad_norm": 27.82950606174818, "learning_rate": 5.811836115326252e-07, "logits/chosen": -0.491192102432251, "logits/rejected": -0.45507892966270447, "logps/chosen": -215.52423095703125, "logps/rejected": -239.1810302734375, "loss": 0.4534, "rewards/accuracies": 0.7812500596046448, "rewards/chosen": -1.0811206102371216, "rewards/margins": 1.171852469444275, "rewards/rejected": -2.2529730796813965, "step": 700},
+ {"epoch": 0.4847036736783718, "grad_norm": 32.40109215164061, "learning_rate": 5.735963581183611e-07, "logits/chosen": -0.48725226521492004, "logits/rejected": -0.4451846480369568, "logps/chosen": -211.22933959960938, "logps/rejected": -236.77740478515625, "loss": 0.4487, "rewards/accuracies": 0.7828124761581421, "rewards/chosen": -1.0895929336547852, "rewards/margins": 1.1770341396331787, "rewards/rejected": -2.2666268348693848, "step": 710},
+ {"epoch": 0.49153048598370097, "grad_norm": 27.259651037643604, "learning_rate": 5.660091047040971e-07, "logits/chosen": -0.5053711533546448, "logits/rejected": -0.4444194436073303, "logps/chosen": -205.80319213867188, "logps/rejected": -230.7117919921875, "loss": 0.4743, "rewards/accuracies": 0.770312488079071, "rewards/chosen": -1.1533528566360474, "rewards/margins": 1.07535982131958, "rewards/rejected": -2.228712797164917, "step": 720},
+ {"epoch": 0.4983572982890302, "grad_norm": 23.45407239305211, "learning_rate": 5.584218512898331e-07, "logits/chosen": -0.46755921840667725, "logits/rejected": -0.41828638315200806, "logps/chosen": -214.959716796875, "logps/rejected": -237.14413452148438, "loss": 0.4451, "rewards/accuracies": 0.8125, "rewards/chosen": -1.1057405471801758, "rewards/margins": 1.1711297035217285, "rewards/rejected": -2.2768704891204834, "step": 730},
+ {"epoch": 0.5051841105943593, "grad_norm": 24.513672931022274, "learning_rate": 5.508345978755691e-07, "logits/chosen": -0.5107758045196533, "logits/rejected": -0.47158223390579224, "logps/chosen": -214.1978759765625, "logps/rejected": -236.34100341796875, "loss": 0.4356, "rewards/accuracies": 0.796875, "rewards/chosen": -1.0896263122558594, "rewards/margins": 1.132210612297058, "rewards/rejected": -2.221837043762207, "step": 740},
+ {"epoch": 0.5120109228996885, "grad_norm": 27.557361902005226, "learning_rate": 5.432473444613049e-07, "logits/chosen": -0.47495898604393005, "logits/rejected": -0.42891502380371094, "logps/chosen": -215.3628692626953, "logps/rejected": -240.29644775390625, "loss": 0.4433, "rewards/accuracies": 0.8046875596046448, "rewards/chosen": -1.1231842041015625, "rewards/margins": 1.1870129108428955, "rewards/rejected": -2.310196876525879, "step": 750},
+ {"epoch": 0.5188377352050177, "grad_norm": 25.763088367806024, "learning_rate": 5.356600910470409e-07, "logits/chosen": -0.5234218835830688, "logits/rejected": -0.46476346254348755, "logps/chosen": -214.0421142578125, "logps/rejected": -238.0985565185547, "loss": 0.4236, "rewards/accuracies": 0.800000011920929, "rewards/chosen": -1.1129274368286133, "rewards/margins": 1.2772108316421509, "rewards/rejected": -2.3901383876800537, "step": 760},
+ {"epoch": 0.5256645475103469, "grad_norm": 27.345063991868273, "learning_rate": 5.280728376327769e-07, "logits/chosen": -0.5037857294082642, "logits/rejected": -0.4784386157989502, "logps/chosen": -210.1291046142578, "logps/rejected": -236.04969787597656, "loss": 0.4347, "rewards/accuracies": 0.8109375238418579, "rewards/chosen": -1.1615896224975586, "rewards/margins": 1.2552942037582397, "rewards/rejected": -2.416883945465088, "step": 770},
+ {"epoch": 0.5324913598156761, "grad_norm": 23.559487104074414, "learning_rate": 5.204855842185128e-07, "logits/chosen": -0.5264319777488708, "logits/rejected": -0.47137507796287537, "logps/chosen": -218.16024780273438, "logps/rejected": -245.5438995361328, "loss": 0.4609, "rewards/accuracies": 0.7703125476837158, "rewards/chosen": -1.2951855659484863, "rewards/margins": 1.170878291130066, "rewards/rejected": -2.4660637378692627, "step": 780},
+ {"epoch": 0.5393181721210053, "grad_norm": 30.437623350555043, "learning_rate": 5.128983308042489e-07, "logits/chosen": -0.4954899251461029, "logits/rejected": -0.45233067870140076, "logps/chosen": -213.85757446289062, "logps/rejected": -242.7041473388672, "loss": 0.4193, "rewards/accuracies": 0.8093750476837158, "rewards/chosen": -1.2700811624526978, "rewards/margins": 1.2533843517303467, "rewards/rejected": -2.523465394973755, "step": 790},
+ {"epoch": 0.5461449844263344, "grad_norm": 25.96035380580991, "learning_rate": 5.053110773899848e-07, "logits/chosen": -0.49867063760757446, "logits/rejected": -0.44984591007232666, "logps/chosen": -218.67074584960938, "logps/rejected": -247.30982971191406, "loss": 0.424, "rewards/accuracies": 0.8046875, "rewards/chosen": -1.237367868423462, "rewards/margins": 1.278685212135315, "rewards/rejected": -2.5160531997680664, "step": 800},
+ {"epoch": 0.5529717967316636, "grad_norm": 27.066709483078917, "learning_rate": 4.977238239757208e-07, "logits/chosen": -0.4714178144931793, "logits/rejected": -0.4372885823249817, "logps/chosen": -218.98892211914062, "logps/rejected": -242.98770141601562, "loss": 0.4266, "rewards/accuracies": 0.7984375357627869, "rewards/chosen": -1.298151969909668, "rewards/margins": 1.222092866897583, "rewards/rejected": -2.520244836807251, "step": 810},
+ {"epoch": 0.5597986090369927, "grad_norm": 28.230804755745105, "learning_rate": 4.901365705614567e-07, "logits/chosen": -0.45390385389328003, "logits/rejected": -0.43030381202697754, "logps/chosen": -220.013427734375, "logps/rejected": -241.9390411376953, "loss": 0.4526, "rewards/accuracies": 0.796875, "rewards/chosen": -1.354661464691162, "rewards/margins": 1.215053677558899, "rewards/rejected": -2.5697154998779297, "step": 820},
+ {"epoch": 0.566625421342322, "grad_norm": 32.13534664184047, "learning_rate": 4.825493171471927e-07, "logits/chosen": -0.475396066904068, "logits/rejected": -0.43329310417175293, "logps/chosen": -210.43185424804688, "logps/rejected": -236.67987060546875, "loss": 0.4189, "rewards/accuracies": 0.815625011920929, "rewards/chosen": -1.264033317565918, "rewards/margins": 1.377021074295044, "rewards/rejected": -2.641054153442383, "step": 830},
+ {"epoch": 0.5734522336476512, "grad_norm": 22.262860568714245, "learning_rate": 4.7496206373292864e-07, "logits/chosen": -0.4692656993865967, "logits/rejected": -0.4306912422180176, "logps/chosen": -211.5372772216797, "logps/rejected": -246.39736938476562, "loss": 0.3916, "rewards/accuracies": 0.8375000357627869, "rewards/chosen": -1.1525495052337646, "rewards/margins": 1.4558607339859009, "rewards/rejected": -2.608410358428955, "step": 840},
+ {"epoch": 0.5802790459529803, "grad_norm": 22.80617456340079, "learning_rate": 4.673748103186646e-07, "logits/chosen": -0.46342021226882935, "logits/rejected": -0.41512057185173035, "logps/chosen": -221.32496643066406, "logps/rejected": -251.7954864501953, "loss": 0.394, "rewards/accuracies": 0.8218750357627869, "rewards/chosen": -1.2910584211349487, "rewards/margins": 1.4217520952224731, "rewards/rejected": -2.712810516357422, "step": 850},
+ {"epoch": 0.5871058582583095, "grad_norm": 24.868191575194487, "learning_rate": 4.597875569044006e-07, "logits/chosen": -0.48653626441955566, "logits/rejected": -0.4366312623023987, "logps/chosen": -217.47422790527344, "logps/rejected": -241.48968505859375, "loss": 0.4269, "rewards/accuracies": 0.801562488079071, "rewards/chosen": -1.3257293701171875, "rewards/margins": 1.3266490697860718, "rewards/rejected": -2.652378559112549, "step": 860},
+ {"epoch": 0.5939326705636387, "grad_norm": 27.035059402616938, "learning_rate": 4.5220030349013654e-07, "logits/chosen": -0.5033361911773682,
1307
+ "logits/rejected": -0.4694429039955139,
1308
+ "logps/chosen": -214.79815673828125,
1309
+ "logps/rejected": -237.64102172851562,
1310
+ "loss": 0.4296,
1311
+ "rewards/accuracies": 0.7921874523162842,
1312
+ "rewards/chosen": -1.3357491493225098,
1313
+ "rewards/margins": 1.2649694681167603,
1314
+ "rewards/rejected": -2.6007187366485596,
1315
+ "step": 870
1316
+ },
1317
+ {
1318
+ "epoch": 0.6007594828689679,
1319
+ "grad_norm": 27.746278145893346,
1320
+ "learning_rate": 4.446130500758725e-07,
1321
+ "logits/chosen": -0.5227242708206177,
1322
+ "logits/rejected": -0.4751604497432709,
1323
+ "logps/chosen": -218.23658752441406,
1324
+ "logps/rejected": -249.3454132080078,
1325
+ "loss": 0.4233,
1326
+ "rewards/accuracies": 0.817187488079071,
1327
+ "rewards/chosen": -1.3457627296447754,
1328
+ "rewards/margins": 1.428666591644287,
1329
+ "rewards/rejected": -2.7744295597076416,
1330
+ "step": 880
1331
+ },
1332
+ {
1333
+ "epoch": 0.6075862951742971,
1334
+ "grad_norm": 26.892931653503698,
1335
+ "learning_rate": 4.370257966616085e-07,
1336
+ "logits/chosen": -0.5066260099411011,
1337
+ "logits/rejected": -0.47855502367019653,
1338
+ "logps/chosen": -214.84915161132812,
1339
+ "logps/rejected": -240.56436157226562,
1340
+ "loss": 0.4612,
1341
+ "rewards/accuracies": 0.7812500596046448,
1342
+ "rewards/chosen": -1.4467679262161255,
1343
+ "rewards/margins": 1.3007091283798218,
1344
+ "rewards/rejected": -2.7474770545959473,
1345
+ "step": 890
1346
+ },
1347
+ {
1348
+ "epoch": 0.6144131074796262,
1349
+ "grad_norm": 32.793455771900234,
1350
+ "learning_rate": 4.2943854324734444e-07,
1351
+ "logits/chosen": -0.4987248182296753,
1352
+ "logits/rejected": -0.4517776668071747,
1353
+ "logps/chosen": -218.49545288085938,
1354
+ "logps/rejected": -252.3199462890625,
1355
+ "loss": 0.4007,
1356
+ "rewards/accuracies": 0.828125,
1357
+ "rewards/chosen": -1.4036812782287598,
1358
+ "rewards/margins": 1.497314453125,
1359
+ "rewards/rejected": -2.9009957313537598,
1360
+ "step": 900
1361
+ },
1362
+ {
1363
+ "epoch": 0.6212399197849554,
1364
+ "grad_norm": 30.14977908240741,
1365
+ "learning_rate": 4.2185128983308036e-07,
1366
+ "logits/chosen": -0.5123909711837769,
1367
+ "logits/rejected": -0.456384539604187,
1368
+ "logps/chosen": -221.94183349609375,
1369
+ "logps/rejected": -250.5224151611328,
1370
+ "loss": 0.4185,
1371
+ "rewards/accuracies": 0.8265625238418579,
1372
+ "rewards/chosen": -1.3800506591796875,
1373
+ "rewards/margins": 1.4040327072143555,
1374
+ "rewards/rejected": -2.784083366394043,
1375
+ "step": 910
1376
+ },
1377
+ {
1378
+ "epoch": 0.6280667320902846,
1379
+ "grad_norm": 23.187149506889586,
1380
+ "learning_rate": 4.142640364188164e-07,
1381
+ "logits/chosen": -0.5007960200309753,
1382
+ "logits/rejected": -0.4656420350074768,
1383
+ "logps/chosen": -224.66000366210938,
1384
+ "logps/rejected": -250.5994873046875,
1385
+ "loss": 0.4194,
1386
+ "rewards/accuracies": 0.817187488079071,
1387
+ "rewards/chosen": -1.4466440677642822,
1388
+ "rewards/margins": 1.3647561073303223,
1389
+ "rewards/rejected": -2.8114004135131836,
1390
+ "step": 920
1391
+ },
1392
+ {
1393
+ "epoch": 0.6348935443956137,
1394
+ "grad_norm": 26.465496977643166,
1395
+ "learning_rate": 4.0667678300455234e-07,
1396
+ "logits/chosen": -0.5095345973968506,
1397
+ "logits/rejected": -0.44781219959259033,
1398
+ "logps/chosen": -219.541259765625,
1399
+ "logps/rejected": -253.14544677734375,
1400
+ "loss": 0.3631,
1401
+ "rewards/accuracies": 0.8765624761581421,
1402
+ "rewards/chosen": -1.3718998432159424,
1403
+ "rewards/margins": 1.6033210754394531,
1404
+ "rewards/rejected": -2.9752209186553955,
1405
+ "step": 930
1406
+ },
1407
+ {
1408
+ "epoch": 0.641720356700943,
1409
+ "grad_norm": 21.651167586614733,
1410
+ "learning_rate": 3.990895295902883e-07,
1411
+ "logits/chosen": -0.5611530542373657,
1412
+ "logits/rejected": -0.5065969824790955,
1413
+ "logps/chosen": -222.84457397460938,
1414
+ "logps/rejected": -251.35067749023438,
1415
+ "loss": 0.397,
1416
+ "rewards/accuracies": 0.8250000476837158,
1417
+ "rewards/chosen": -1.4304229021072388,
1418
+ "rewards/margins": 1.4556035995483398,
1419
+ "rewards/rejected": -2.886026620864868,
1420
+ "step": 940
1421
+ },
1422
+ {
1423
+ "epoch": 0.6485471690062722,
1424
+ "grad_norm": 21.56653990852637,
1425
+ "learning_rate": 3.915022761760243e-07,
1426
+ "logits/chosen": -0.575349748134613,
1427
+ "logits/rejected": -0.5415146350860596,
1428
+ "logps/chosen": -209.71266174316406,
1429
+ "logps/rejected": -239.22946166992188,
1430
+ "loss": 0.4001,
1431
+ "rewards/accuracies": 0.8187500238418579,
1432
+ "rewards/chosen": -1.521388053894043,
1433
+ "rewards/margins": 1.4791213274002075,
1434
+ "rewards/rejected": -3.000509262084961,
1435
+ "step": 950
1436
+ },
1437
+ {
1438
+ "epoch": 0.6553739813116013,
1439
+ "grad_norm": 23.31036794244746,
1440
+ "learning_rate": 3.8391502276176024e-07,
1441
+ "logits/chosen": -0.5698951482772827,
1442
+ "logits/rejected": -0.5178714394569397,
1443
+ "logps/chosen": -228.25030517578125,
1444
+ "logps/rejected": -261.415771484375,
1445
+ "loss": 0.3891,
1446
+ "rewards/accuracies": 0.817187488079071,
1447
+ "rewards/chosen": -1.6143665313720703,
1448
+ "rewards/margins": 1.645197868347168,
1449
+ "rewards/rejected": -3.2595643997192383,
1450
+ "step": 960
1451
+ },
1452
+ {
1453
+ "epoch": 0.6622007936169305,
1454
+ "grad_norm": 26.214223596010875,
1455
+ "learning_rate": 3.763277693474962e-07,
1456
+ "logits/chosen": -0.5214463472366333,
1457
+ "logits/rejected": -0.46749287843704224,
1458
+ "logps/chosen": -218.10549926757812,
1459
+ "logps/rejected": -251.87442016601562,
1460
+ "loss": 0.4196,
1461
+ "rewards/accuracies": 0.8125,
1462
+ "rewards/chosen": -1.63547945022583,
1463
+ "rewards/margins": 1.4419658184051514,
1464
+ "rewards/rejected": -3.0774452686309814,
1465
+ "step": 970
1466
+ },
1467
+ {
1468
+ "epoch": 0.6690276059222596,
1469
+ "grad_norm": 24.89349466924626,
1470
+ "learning_rate": 3.687405159332321e-07,
1471
+ "logits/chosen": -0.5419428944587708,
1472
+ "logits/rejected": -0.5022714734077454,
1473
+ "logps/chosen": -223.1068115234375,
1474
+ "logps/rejected": -255.94949340820312,
1475
+ "loss": 0.4144,
1476
+ "rewards/accuracies": 0.815625011920929,
1477
+ "rewards/chosen": -1.7430050373077393,
1478
+ "rewards/margins": 1.4413095712661743,
1479
+ "rewards/rejected": -3.184314489364624,
1480
+ "step": 980
1481
+ },
1482
+ {
1483
+ "epoch": 0.6758544182275888,
1484
+ "grad_norm": 25.914909518247867,
1485
+ "learning_rate": 3.611532625189681e-07,
1486
+ "logits/chosen": -0.5115488767623901,
1487
+ "logits/rejected": -0.4625004827976227,
1488
+ "logps/chosen": -229.49105834960938,
1489
+ "logps/rejected": -265.0625,
1490
+ "loss": 0.3983,
1491
+ "rewards/accuracies": 0.8218750357627869,
1492
+ "rewards/chosen": -1.6320453882217407,
1493
+ "rewards/margins": 1.5734854936599731,
1494
+ "rewards/rejected": -3.2055306434631348,
1495
+ "step": 990
1496
+ },
1497
+ {
1498
+ "epoch": 0.6826812305329181,
1499
+ "grad_norm": 31.456143694319483,
1500
+ "learning_rate": 3.5356600910470406e-07,
1501
+ "logits/chosen": -0.5371730327606201,
1502
+ "logits/rejected": -0.4974362850189209,
1503
+ "logps/chosen": -236.7477569580078,
1504
+ "logps/rejected": -264.0472106933594,
1505
+ "loss": 0.3827,
1506
+ "rewards/accuracies": 0.8437500596046448,
1507
+ "rewards/chosen": -1.620214819908142,
1508
+ "rewards/margins": 1.5398459434509277,
1509
+ "rewards/rejected": -3.1600606441497803,
1510
+ "step": 1000
1511
+ },
1512
+ {
1513
+ "epoch": 0.6895080428382472,
1514
+ "grad_norm": 29.661159656571126,
1515
+ "learning_rate": 3.459787556904401e-07,
1516
+ "logits/chosen": -0.5440015196800232,
1517
+ "logits/rejected": -0.49301889538764954,
1518
+ "logps/chosen": -224.03494262695312,
1519
+ "logps/rejected": -254.42193603515625,
1520
+ "loss": 0.4033,
1521
+ "rewards/accuracies": 0.8296875357627869,
1522
+ "rewards/chosen": -1.5924382209777832,
1523
+ "rewards/margins": 1.5776193141937256,
1524
+ "rewards/rejected": -3.170057773590088,
1525
+ "step": 1010
1526
+ },
1527
+ {
1528
+ "epoch": 0.6963348551435764,
1529
+ "grad_norm": 38.12069128333079,
1530
+ "learning_rate": 3.3839150227617604e-07,
1531
+ "logits/chosen": -0.5860447883605957,
1532
+ "logits/rejected": -0.543270468711853,
1533
+ "logps/chosen": -228.84930419921875,
1534
+ "logps/rejected": -262.8966064453125,
1535
+ "loss": 0.3898,
1536
+ "rewards/accuracies": 0.8406250476837158,
1537
+ "rewards/chosen": -1.6053173542022705,
1538
+ "rewards/margins": 1.590077519416809,
1539
+ "rewards/rejected": -3.19539475440979,
1540
+ "step": 1020
1541
+ },
1542
+ {
1543
+ "epoch": 0.7031616674489056,
1544
+ "grad_norm": 32.08364090632609,
1545
+ "learning_rate": 3.30804248861912e-07,
1546
+ "logits/chosen": -0.6051906943321228,
1547
+ "logits/rejected": -0.5597983598709106,
1548
+ "logps/chosen": -224.02899169921875,
1549
+ "logps/rejected": -258.93511962890625,
1550
+ "loss": 0.396,
1551
+ "rewards/accuracies": 0.8171875476837158,
1552
+ "rewards/chosen": -1.7182796001434326,
1553
+ "rewards/margins": 1.5724890232086182,
1554
+ "rewards/rejected": -3.290768623352051,
1555
+ "step": 1030
1556
+ },
1557
+ {
1558
+ "epoch": 0.7099884797542347,
1559
+ "grad_norm": 25.599680429412086,
1560
+ "learning_rate": 3.232169954476479e-07,
1561
+ "logits/chosen": -0.6112679243087769,
1562
+ "logits/rejected": -0.5801026821136475,
1563
+ "logps/chosen": -225.71258544921875,
1564
+ "logps/rejected": -264.3663330078125,
1565
+ "loss": 0.3637,
1566
+ "rewards/accuracies": 0.8421875238418579,
1567
+ "rewards/chosen": -1.4613301753997803,
1568
+ "rewards/margins": 1.712023138999939,
1569
+ "rewards/rejected": -3.1733531951904297,
1570
+ "step": 1040
1571
+ },
1572
+ {
1573
+ "epoch": 0.716815292059564,
1574
+ "grad_norm": 26.325121380352627,
1575
+ "learning_rate": 3.156297420333839e-07,
1576
+ "logits/chosen": -0.6216264963150024,
1577
+ "logits/rejected": -0.5548665523529053,
1578
+ "logps/chosen": -226.58059692382812,
1579
+ "logps/rejected": -263.7754821777344,
1580
+ "loss": 0.3636,
1581
+ "rewards/accuracies": 0.8374999761581421,
1582
+ "rewards/chosen": -1.6483052968978882,
1583
+ "rewards/margins": 1.7705044746398926,
1584
+ "rewards/rejected": -3.4188098907470703,
1585
+ "step": 1050
1586
+ },
1587
+ {
1588
+ "epoch": 0.7236421043648931,
1589
+ "grad_norm": 23.347203569226366,
1590
+ "learning_rate": 3.0804248861911986e-07,
1591
+ "logits/chosen": -0.5403355360031128,
1592
+ "logits/rejected": -0.49409806728363037,
1593
+ "logps/chosen": -225.88253784179688,
1594
+ "logps/rejected": -256.93182373046875,
1595
+ "loss": 0.393,
1596
+ "rewards/accuracies": 0.831250011920929,
1597
+ "rewards/chosen": -1.6747300624847412,
1598
+ "rewards/margins": 1.6634035110473633,
1599
+ "rewards/rejected": -3.3381335735321045,
1600
+ "step": 1060
1601
+ },
1602
+ {
1603
+ "epoch": 0.7304689166702223,
1604
+ "grad_norm": 26.591582696664684,
1605
+ "learning_rate": 3.004552352048558e-07,
1606
+ "logits/chosen": -0.60378497838974,
1607
+ "logits/rejected": -0.5446761250495911,
1608
+ "logps/chosen": -222.86285400390625,
1609
+ "logps/rejected": -254.32901000976562,
1610
+ "loss": 0.3562,
1611
+ "rewards/accuracies": 0.8500000238418579,
1612
+ "rewards/chosen": -1.373286247253418,
1613
+ "rewards/margins": 1.5952813625335693,
1614
+ "rewards/rejected": -2.9685676097869873,
1615
+ "step": 1070
1616
+ },
1617
+ {
1618
+ "epoch": 0.7372957289755515,
1619
+ "grad_norm": 26.301256433411677,
1620
+ "learning_rate": 2.928679817905918e-07,
1621
+ "logits/chosen": -0.575655996799469,
1622
+ "logits/rejected": -0.5388238430023193,
1623
+ "logps/chosen": -226.25411987304688,
1624
+ "logps/rejected": -257.7029724121094,
1625
+ "loss": 0.3889,
1626
+ "rewards/accuracies": 0.832812488079071,
1627
+ "rewards/chosen": -1.5678967237472534,
1628
+ "rewards/margins": 1.5496362447738647,
1629
+ "rewards/rejected": -3.1175332069396973,
1630
+ "step": 1080
1631
+ },
1632
+ {
1633
+ "epoch": 0.7441225412808806,
1634
+ "grad_norm": 29.1969544488184,
1635
+ "learning_rate": 2.8528072837632776e-07,
1636
+ "logits/chosen": -0.563581109046936,
1637
+ "logits/rejected": -0.4889605939388275,
1638
+ "logps/chosen": -215.546630859375,
1639
+ "logps/rejected": -251.0224609375,
1640
+ "loss": 0.3594,
1641
+ "rewards/accuracies": 0.854687511920929,
1642
+ "rewards/chosen": -1.5211578607559204,
1643
+ "rewards/margins": 1.6970359086990356,
1644
+ "rewards/rejected": -3.218193531036377,
1645
+ "step": 1090
1646
+ },
1647
+ {
1648
+ "epoch": 0.7509493535862098,
1649
+ "grad_norm": 28.75255873182244,
1650
+ "learning_rate": 2.776934749620637e-07,
1651
+ "logits/chosen": -0.5607287883758545,
1652
+ "logits/rejected": -0.5297821760177612,
1653
+ "logps/chosen": -213.63365173339844,
1654
+ "logps/rejected": -240.619384765625,
1655
+ "loss": 0.4057,
1656
+ "rewards/accuracies": 0.8203125,
1657
+ "rewards/chosen": -1.6024796962738037,
1658
+ "rewards/margins": 1.537475347518921,
1659
+ "rewards/rejected": -3.1399548053741455,
1660
+ "step": 1100
1661
+ },
1662
+ {
1663
+ "epoch": 0.7577761658915391,
1664
+ "grad_norm": 28.027697277996715,
1665
+ "learning_rate": 2.7010622154779964e-07,
1666
+ "logits/chosen": -0.5775099992752075,
1667
+ "logits/rejected": -0.5231542587280273,
1668
+ "logps/chosen": -224.80667114257812,
1669
+ "logps/rejected": -259.0721435546875,
1670
+ "loss": 0.4044,
1671
+ "rewards/accuracies": 0.8140624761581421,
1672
+ "rewards/chosen": -1.598193883895874,
1673
+ "rewards/margins": 1.5613579750061035,
1674
+ "rewards/rejected": -3.1595516204833984,
1675
+ "step": 1110
1676
+ },
1677
+ {
1678
+ "epoch": 0.7646029781968682,
1679
+ "grad_norm": 19.772049611357087,
1680
+ "learning_rate": 2.6251896813353566e-07,
1681
+ "logits/chosen": -0.5745671987533569,
1682
+ "logits/rejected": -0.5299438834190369,
1683
+ "logps/chosen": -225.1347198486328,
1684
+ "logps/rejected": -255.4309539794922,
1685
+ "loss": 0.3858,
1686
+ "rewards/accuracies": 0.8187500238418579,
1687
+ "rewards/chosen": -1.4894109964370728,
1688
+ "rewards/margins": 1.691686987876892,
1689
+ "rewards/rejected": -3.181097984313965,
1690
+ "step": 1120
1691
+ },
1692
+ {
1693
+ "epoch": 0.7714297905021974,
1694
+ "grad_norm": 23.20450105175028,
1695
+ "learning_rate": 2.549317147192716e-07,
1696
+ "logits/chosen": -0.5600322484970093,
1697
+ "logits/rejected": -0.5002347230911255,
1698
+ "logps/chosen": -219.86434936523438,
1699
+ "logps/rejected": -253.78878784179688,
1700
+ "loss": 0.3663,
1701
+ "rewards/accuracies": 0.8453125357627869,
1702
+ "rewards/chosen": -1.4636483192443848,
1703
+ "rewards/margins": 1.6817249059677124,
1704
+ "rewards/rejected": -3.1453733444213867,
1705
+ "step": 1130
1706
+ },
1707
+ {
1708
+ "epoch": 0.7782566028075265,
1709
+ "grad_norm": 28.72150866508454,
1710
+ "learning_rate": 2.473444613050076e-07,
1711
+ "logits/chosen": -0.6041327118873596,
1712
+ "logits/rejected": -0.5645285844802856,
1713
+ "logps/chosen": -216.68939208984375,
1714
+ "logps/rejected": -247.66275024414062,
1715
+ "loss": 0.3806,
1716
+ "rewards/accuracies": 0.8328125476837158,
1717
+ "rewards/chosen": -1.5338340997695923,
1718
+ "rewards/margins": 1.5916988849639893,
1719
+ "rewards/rejected": -3.125532865524292,
1720
+ "step": 1140
1721
+ },
1722
+ {
1723
+ "epoch": 0.7850834151128557,
1724
+ "grad_norm": 29.858461214238897,
1725
+ "learning_rate": 2.3975720789074356e-07,
1726
+ "logits/chosen": -0.6299252510070801,
1727
+ "logits/rejected": -0.586955189704895,
1728
+ "logps/chosen": -231.401611328125,
1729
+ "logps/rejected": -263.02197265625,
1730
+ "loss": 0.3998,
1731
+ "rewards/accuracies": 0.8328125476837158,
1732
+ "rewards/chosen": -1.6045633554458618,
1733
+ "rewards/margins": 1.6497775316238403,
1734
+ "rewards/rejected": -3.2543411254882812,
1735
+ "step": 1150
1736
+ },
1737
+ {
1738
+ "epoch": 0.791910227418185,
1739
+ "grad_norm": 26.24413163476253,
1740
+ "learning_rate": 2.321699544764795e-07,
1741
+ "logits/chosen": -0.5830259919166565,
1742
+ "logits/rejected": -0.5397896766662598,
1743
+ "logps/chosen": -213.19375610351562,
1744
+ "logps/rejected": -249.24717712402344,
1745
+ "loss": 0.3717,
1746
+ "rewards/accuracies": 0.8250000476837158,
1747
+ "rewards/chosen": -1.6501479148864746,
1748
+ "rewards/margins": 1.6961115598678589,
1749
+ "rewards/rejected": -3.346259593963623,
1750
+ "step": 1160
1751
+ },
1752
+ {
1753
+ "epoch": 0.7987370397235141,
1754
+ "grad_norm": 31.016581977192125,
1755
+ "learning_rate": 2.2458270106221546e-07,
1756
+ "logits/chosen": -0.5983390808105469,
1757
+ "logits/rejected": -0.5455670952796936,
1758
+ "logps/chosen": -224.10618591308594,
1759
+ "logps/rejected": -254.94383239746094,
1760
+ "loss": 0.3732,
1761
+ "rewards/accuracies": 0.8296875357627869,
1762
+ "rewards/chosen": -1.5914267301559448,
1763
+ "rewards/margins": 1.632917046546936,
1764
+ "rewards/rejected": -3.2243435382843018,
1765
+ "step": 1170
1766
+ },
1767
+ {
1768
+ "epoch": 0.8055638520288433,
1769
+ "grad_norm": 82.84012389678055,
1770
+ "learning_rate": 2.1699544764795143e-07,
1771
+ "logits/chosen": -0.6019859910011292,
1772
+ "logits/rejected": -0.5678104758262634,
1773
+ "logps/chosen": -222.878662109375,
1774
+ "logps/rejected": -253.78060913085938,
1775
+ "loss": 0.4147,
1776
+ "rewards/accuracies": 0.828125,
1777
+ "rewards/chosen": -1.6500358581542969,
1778
+ "rewards/margins": 1.5844390392303467,
1779
+ "rewards/rejected": -3.2344746589660645,
1780
+ "step": 1180
1781
+ },
1782
+ {
1783
+ "epoch": 0.8123906643341725,
1784
+ "grad_norm": 22.55342908794488,
1785
+ "learning_rate": 2.094081942336874e-07,
1786
+ "logits/chosen": -0.5819066762924194,
1787
+ "logits/rejected": -0.5418481826782227,
1788
+ "logps/chosen": -221.70608520507812,
1789
+ "logps/rejected": -254.09922790527344,
1790
+ "loss": 0.3354,
1791
+ "rewards/accuracies": 0.8609375357627869,
1792
+ "rewards/chosen": -1.6243677139282227,
1793
+ "rewards/margins": 1.7402938604354858,
1794
+ "rewards/rejected": -3.364661455154419,
1795
+ "step": 1190
1796
+ },
1797
+ {
1798
+ "epoch": 0.8192174766395016,
1799
+ "grad_norm": 21.249823285036445,
1800
+ "learning_rate": 2.0182094081942336e-07,
1801
+ "logits/chosen": -0.5841631889343262,
1802
+ "logits/rejected": -0.5415323972702026,
1803
+ "logps/chosen": -225.88800048828125,
1804
+ "logps/rejected": -254.038818359375,
1805
+ "loss": 0.3821,
1806
+ "rewards/accuracies": 0.839062511920929,
1807
+ "rewards/chosen": -1.6330121755599976,
1808
+ "rewards/margins": 1.732587456703186,
1809
+ "rewards/rejected": -3.3655996322631836,
1810
+ "step": 1200
1811
+ },
1812
+ {
1813
+ "epoch": 0.8260442889448308,
1814
+ "grad_norm": 22.957761561567523,
1815
+ "learning_rate": 1.9423368740515933e-07,
1816
+ "logits/chosen": -0.5876274704933167,
1817
+ "logits/rejected": -0.5527446866035461,
1818
+ "logps/chosen": -237.04470825195312,
1819
+ "logps/rejected": -263.58868408203125,
1820
+ "loss": 0.3658,
1821
+ "rewards/accuracies": 0.8531250357627869,
1822
+ "rewards/chosen": -1.6271567344665527,
1823
+ "rewards/margins": 1.6703208684921265,
1824
+ "rewards/rejected": -3.297477960586548,
1825
+ "step": 1210
1826
+ },
1827
+ {
1828
+ "epoch": 0.83287110125016,
1829
+ "grad_norm": 26.3109466733547,
1830
+ "learning_rate": 1.8664643399089527e-07,
1831
+ "logits/chosen": -0.5855602622032166,
1832
+ "logits/rejected": -0.5348464846611023,
1833
+ "logps/chosen": -220.74581909179688,
1834
+ "logps/rejected": -259.97076416015625,
1835
+ "loss": 0.392,
1836
+ "rewards/accuracies": 0.8234375715255737,
1837
+ "rewards/chosen": -1.666372299194336,
1838
+ "rewards/margins": 1.7341811656951904,
1839
+ "rewards/rejected": -3.4005534648895264,
1840
+ "step": 1220
1841
+ },
1842
+ {
1843
+ "epoch": 0.8396979135554892,
1844
+ "grad_norm": 32.86005475979103,
1845
+ "learning_rate": 1.7905918057663124e-07,
1846
+ "logits/chosen": -0.6146824359893799,
1847
+ "logits/rejected": -0.5769205093383789,
1848
+ "logps/chosen": -223.04859924316406,
1849
+ "logps/rejected": -259.2931213378906,
1850
+ "loss": 0.3747,
1851
+ "rewards/accuracies": 0.8484375476837158,
1852
+ "rewards/chosen": -1.6388548612594604,
1853
+ "rewards/margins": 1.6829884052276611,
1854
+ "rewards/rejected": -3.321843147277832,
1855
+ "step": 1230
1856
+ },
1857
+ {
1858
+ "epoch": 0.8465247258608184,
1859
+ "grad_norm": 27.824013672905682,
1860
+ "learning_rate": 1.7147192716236723e-07,
1861
+ "logits/chosen": -0.5848041772842407,
1862
+ "logits/rejected": -0.5365484356880188,
1863
+ "logps/chosen": -224.9688262939453,
1864
+ "logps/rejected": -253.75857543945312,
1865
+ "loss": 0.374,
1866
+ "rewards/accuracies": 0.8343750238418579,
1867
+ "rewards/chosen": -1.633022427558899,
1868
+ "rewards/margins": 1.6262296438217163,
1869
+ "rewards/rejected": -3.2592520713806152,
1870
+ "step": 1240
1871
+ },
1872
+ {
1873
+ "epoch": 0.8533515381661475,
1874
+ "grad_norm": 28.870976428951412,
1875
+ "learning_rate": 1.638846737481032e-07,
1876
+ "logits/chosen": -0.6266176700592041,
1877
+ "logits/rejected": -0.5750494003295898,
1878
+ "logps/chosen": -225.53489685058594,
1879
+ "logps/rejected": -251.16812133789062,
1880
+ "loss": 0.3643,
1881
+ "rewards/accuracies": 0.8421875238418579,
1882
+ "rewards/chosen": -1.6029326915740967,
1883
+ "rewards/margins": 1.637751817703247,
1884
+ "rewards/rejected": -3.2406845092773438,
1885
+ "step": 1250
1886
+ },
1887
+ {
1888
+ "epoch": 0.8601783504714767,
1889
+ "grad_norm": 28.44671682958466,
1890
+ "learning_rate": 1.5629742033383914e-07,
1891
+ "logits/chosen": -0.5748768448829651,
1892
+ "logits/rejected": -0.5039246082305908,
1893
+ "logps/chosen": -229.083740234375,
1894
+ "logps/rejected": -265.5872802734375,
1895
+ "loss": 0.3464,
1896
+ "rewards/accuracies": 0.8515625,
1897
+ "rewards/chosen": -1.6385741233825684,
1898
+ "rewards/margins": 2.006284713745117,
1899
+ "rewards/rejected": -3.6448588371276855,
1900
+ "step": 1260
1901
+ },
1902
+ {
1903
+ "epoch": 0.867005162776806,
1904
+ "grad_norm": 26.03554320093484,
1905
+ "learning_rate": 1.487101669195751e-07,
1906
+ "logits/chosen": -0.580173671245575,
1907
+ "logits/rejected": -0.5294475555419922,
1908
+ "logps/chosen": -225.72938537597656,
1909
+ "logps/rejected": -262.03546142578125,
1910
+ "loss": 0.3718,
1911
+ "rewards/accuracies": 0.8359375,
1912
+ "rewards/chosen": -1.6143238544464111,
1913
+ "rewards/margins": 1.8427155017852783,
1914
+ "rewards/rejected": -3.4570393562316895,
1915
+ "step": 1270
1916
+ },
1917
+ {
1918
+ "epoch": 0.8738319750821351,
1919
+ "grad_norm": 22.97729500897279,
1920
+ "learning_rate": 1.4112291350531107e-07,
1921
+ "logits/chosen": -0.6003884673118591,
1922
+ "logits/rejected": -0.5561665296554565,
1923
+ "logps/chosen": -221.987548828125,
1924
+ "logps/rejected": -258.51727294921875,
1925
+ "loss": 0.3686,
1926
+ "rewards/accuracies": 0.8359375,
1927
+ "rewards/chosen": -1.4877190589904785,
1928
+ "rewards/margins": 1.6500287055969238,
1929
+ "rewards/rejected": -3.1377477645874023,
1930
+ "step": 1280
1931
+ },
1932
+ {
1933
+ "epoch": 0.8806587873874643,
1934
+ "grad_norm": 31.37447822214391,
1935
+ "learning_rate": 1.3353566009104704e-07,
1936
+ "logits/chosen": -0.6364210844039917,
1937
+ "logits/rejected": -0.575194239616394,
1938
+ "logps/chosen": -225.1094207763672,
1939
+ "logps/rejected": -260.13885498046875,
1940
+ "loss": 0.3534,
1941
+ "rewards/accuracies": 0.864062488079071,
1942
+ "rewards/chosen": -1.6206319332122803,
1943
+ "rewards/margins": 1.7905977964401245,
1944
+ "rewards/rejected": -3.4112298488616943,
1945
+ "step": 1290
1946
+ },
1947
+ {
1948
+ "epoch": 0.8874855996927934,
1949
+ "grad_norm": 22.936789815076953,
1950
+ "learning_rate": 1.25948406676783e-07,
1951
+ "logits/chosen": -0.6323338747024536,
1952
+ "logits/rejected": -0.6003640294075012,
1953
+ "logps/chosen": -227.20034790039062,
1954
+ "logps/rejected": -259.46502685546875,
1955
+ "loss": 0.3575,
1956
+ "rewards/accuracies": 0.8406250476837158,
1957
+ "rewards/chosen": -1.6749684810638428,
1958
+ "rewards/margins": 1.7170754671096802,
1959
+ "rewards/rejected": -3.3920438289642334,
1960
+ "step": 1300
1961
+ },
1962
+ {
1963
+ "epoch": 0.8943124119981226,
1964
+ "grad_norm": 22.489511604558004,
1965
+ "learning_rate": 1.1836115326251896e-07,
1966
+ "logits/chosen": -0.6401182413101196,
1967
+ "logits/rejected": -0.5833394527435303,
1968
+ "logps/chosen": -223.30029296875,
1969
+ "logps/rejected": -262.72998046875,
1970
+ "loss": 0.3353,
1971
+ "rewards/accuracies": 0.8593750596046448,
1972
+ "rewards/chosen": -1.560599446296692,
1973
+ "rewards/margins": 1.906503677368164,
1974
+ "rewards/rejected": -3.4671034812927246,
1975
+ "step": 1310
1976
+ },
1977
+ {
1978
+ "epoch": 0.9011392243034518,
1979
+ "grad_norm": 37.43162732034228,
1980
+ "learning_rate": 1.1077389984825493e-07,
1981
+ "logits/chosen": -0.5761069059371948,
1982
+ "logits/rejected": -0.5430048108100891,
1983
+ "logps/chosen": -237.7594757080078,
1984
+ "logps/rejected": -275.5934753417969,
1985
+ "loss": 0.3514,
1986
+ "rewards/accuracies": 0.859375,
1987
+ "rewards/chosen": -1.6714935302734375,
1988
+ "rewards/margins": 1.8643473386764526,
1989
+ "rewards/rejected": -3.5358407497406006,
1990
+ "step": 1320
1991
+ },
1992
+ {
1993
+ "epoch": 0.907966036608781,
1994
+ "grad_norm": 22.988879587386872,
1995
+ "learning_rate": 1.0318664643399089e-07,
1996
+ "logits/chosen": -0.5806565284729004,
1997
+ "logits/rejected": -0.5450279116630554,
1998
+ "logps/chosen": -221.33053588867188,
1999
+ "logps/rejected": -256.5147705078125,
2000
+ "loss": 0.3729,
2001
+ "rewards/accuracies": 0.856249988079071,
2002
+ "rewards/chosen": -1.6562050580978394,
2003
+ "rewards/margins": 1.747424840927124,
2004
+ "rewards/rejected": -3.403630018234253,
2005
+ "step": 1330
2006
+ },
2007
+ {
2008
+ "epoch": 0.9147928489141102,
2009
+ "grad_norm": 19.80848176554877,
2010
+ "learning_rate": 9.559939301972686e-08,
2011
+ "logits/chosen": -0.6481366157531738,
2012
+ "logits/rejected": -0.6148696541786194,
2013
+ "logps/chosen": -224.6954803466797,
2014
+ "logps/rejected": -256.4845275878906,
2015
+ "loss": 0.3775,
2016
+ "rewards/accuracies": 0.8421875238418579,
2017
+ "rewards/chosen": -1.7428375482559204,
2018
+ "rewards/margins": 1.636692762374878,
2019
+ "rewards/rejected": -3.379530191421509,
2020
+ "step": 1340
2021
+ },
2022
+ {
2023
+ "epoch": 0.9216196612194394,
2024
+ "grad_norm": 25.8470434123946,
2025
+ "learning_rate": 8.801213960546281e-08,
2026
+ "logits/chosen": -0.6496397852897644,
2027
+ "logits/rejected": -0.5912147164344788,
2028
+ "logps/chosen": -223.9413299560547,
2029
+ "logps/rejected": -259.1372375488281,
2030
+ "loss": 0.3461,
2031
+ "rewards/accuracies": 0.856249988079071,
2032
+ "rewards/chosen": -1.6729114055633545,
2033
+ "rewards/margins": 1.7730145454406738,
2034
+ "rewards/rejected": -3.445925712585449,
2035
+ "step": 1350
2036
+ },
2037
+ {
2038
+ "epoch": 0.9284464735247685,
2039
+ "grad_norm": 33.2201336722171,
2040
+ "learning_rate": 8.042488619119878e-08,
2041
+ "logits/chosen": -0.645717203617096,
2042
+ "logits/rejected": -0.6112032532691956,
2043
+ "logps/chosen": -225.99624633789062,
2044
+ "logps/rejected": -257.4811706542969,
2045
+ "loss": 0.4065,
2046
+ "rewards/accuracies": 0.8218750357627869,
2047
+ "rewards/chosen": -1.761589527130127,
2048
+ "rewards/margins": 1.6280558109283447,
2049
+ "rewards/rejected": -3.389645576477051,
2050
+ "step": 1360
2051
+ },
2052
+ {
2053
+ "epoch": 0.9352732858300977,
2054
+ "grad_norm": 27.005710517490183,
2055
+ "learning_rate": 7.283763277693475e-08,
2056
+ "logits/chosen": -0.573918342590332,
2057
+ "logits/rejected": -0.5335432291030884,
2058
+ "logps/chosen": -225.52552795410156,
2059
+ "logps/rejected": -255.49449157714844,
2060
+ "loss": 0.3465,
2061
+ "rewards/accuracies": 0.8531249761581421,
2062
+ "rewards/chosen": -1.7273519039154053,
2063
+ "rewards/margins": 1.7527152299880981,
2064
+ "rewards/rejected": -3.480067253112793,
2065
+ "step": 1370
2066
+ },
2067
+ {
2068
+ "epoch": 0.9421000981354269,
2069
+ "grad_norm": 32.140399259495645,
2070
+ "learning_rate": 6.525037936267071e-08,
2071
+ "logits/chosen": -0.6214314103126526,
2072
+ "logits/rejected": -0.570462167263031,
2073
+ "logps/chosen": -224.70672607421875,
2074
+ "logps/rejected": -264.4761962890625,
2075
+ "loss": 0.3218,
2076
+ "rewards/accuracies": 0.887499988079071,
2077
+ "rewards/chosen": -1.6792542934417725,
2078
+ "rewards/margins": 1.915861964225769,
2079
+ "rewards/rejected": -3.595116138458252,
2080
+ "step": 1380
2081
+ },
2082
+ {
2083
+ "epoch": 0.948926910440756,
2084
+ "grad_norm": 31.250154294424732,
2085
+ "learning_rate": 5.766312594840667e-08,
2086
+ "logits/chosen": -0.6339004635810852,
2087
+ "logits/rejected": -0.5892723798751831,
2088
+ "logps/chosen": -220.26611328125,
2089
+ "logps/rejected": -252.96212768554688,
2090
+ "loss": 0.3864,
2091
+ "rewards/accuracies": 0.8312499523162842,
2092
+ "rewards/chosen": -1.6645467281341553,
2093
+ "rewards/margins": 1.5790960788726807,
2094
+ "rewards/rejected": -3.243642807006836,
2095
+ "step": 1390
2096
+ },
2097
+ {
2098
+ "epoch": 0.9557537227460853,
2099
+ "grad_norm": 30.068762957187783,
2100
+ "learning_rate": 5.007587253414264e-08,
2101
+ "logits/chosen": -0.678811252117157,
2102
+ "logits/rejected": -0.6359538435935974,
2103
+ "logps/chosen": -224.49069213867188,
2104
+ "logps/rejected": -258.3272705078125,
2105
+ "loss": 0.3447,
2106
+ "rewards/accuracies": 0.8531250357627869,
2107
+ "rewards/chosen": -1.575748085975647,
2108
+ "rewards/margins": 1.9220972061157227,
2109
+ "rewards/rejected": -3.49784517288208,
2110
+ "step": 1400
2111
+ },
2112
+ {
2113
+ "epoch": 0.9625805350514144,
2114
+ "grad_norm": 22.16371068962549,
2115
+ "learning_rate": 4.2488619119878606e-08,
2116
+ "logits/chosen": -0.6366287469863892,
2117
+ "logits/rejected": -0.5852836966514587,
2118
+ "logps/chosen": -227.71780395507812,
2119
+ "logps/rejected": -267.0358581542969,
2120
+ "loss": 0.3718,
2121
+ "rewards/accuracies": 0.831250011920929,
2122
+ "rewards/chosen": -1.6289258003234863,
2123
+ "rewards/margins": 1.7643526792526245,
2124
+ "rewards/rejected": -3.3932785987854004,
2125
+ "step": 1410
2126
+ },
2127
+ {
2128
+ "epoch": 0.9694073473567436,
2129
+ "grad_norm": 31.032456565988113,
2130
+ "learning_rate": 3.4901365705614566e-08,
2131
+ "logits/chosen": -0.6306103467941284,
2132
+ "logits/rejected": -0.5921708345413208,
2133
+ "logps/chosen": -221.66065979003906,
2134
+ "logps/rejected": -254.41958618164062,
2135
+ "loss": 0.3678,
2136
+ "rewards/accuracies": 0.823437511920929,
2137
+ "rewards/chosen": -1.5656054019927979,
2138
+ "rewards/margins": 1.682039499282837,
2139
+ "rewards/rejected": -3.2476449012756348,
2140
+ "step": 1420
2141
+ },
2142
+ {
2143
+ "epoch": 0.9762341596620728,
2144
+ "grad_norm": 26.873435878225383,
2145
+ "learning_rate": 2.731411229135053e-08,
2146
+ "logits/chosen": -0.6624563336372375,
2147
+ "logits/rejected": -0.6294071078300476,
2148
+ "logps/chosen": -224.36407470703125,
2149
+ "logps/rejected": -263.2255859375,
2150
+ "loss": 0.3681,
2151
+ "rewards/accuracies": 0.8484375476837158,
2152
+ "rewards/chosen": -1.7730777263641357,
2153
+ "rewards/margins": 1.7527307271957397,
2154
+ "rewards/rejected": -3.525808334350586,
2155
+ "step": 1430
2156
+ },
2157
+ {
2158
+ "epoch": 0.9830609719674019,
2159
+ "grad_norm": 28.36352572432148,
2160
+ "learning_rate": 1.9726858877086493e-08,
2161
+ "logits/chosen": -0.6402366161346436,
2162
+ "logits/rejected": -0.5960521697998047,
2163
+ "logps/chosen": -225.24977111816406,
2164
+ "logps/rejected": -257.8275451660156,
2165
+ "loss": 0.3734,
2166
+ "rewards/accuracies": 0.8500000238418579,
2167
+ "rewards/chosen": -1.7362611293792725,
2168
+ "rewards/margins": 1.7620372772216797,
2169
+ "rewards/rejected": -3.498298168182373,
2170
+ "step": 1440
2171
+ },
2172
+ {
2173
+ "epoch": 0.9898877842727312,
2174
+ "grad_norm": 30.66526971215358,
2175
+ "learning_rate": 1.2139605462822458e-08,
2176
+ "logits/chosen": -0.6005350351333618,
2177
+ "logits/rejected": -0.5661831498146057,
2178
+ "logps/chosen": -227.962158203125,
2179
+ "logps/rejected": -261.6782531738281,
2180
+ "loss": 0.3924,
2181
+ "rewards/accuracies": 0.8328125476837158,
2182
+ "rewards/chosen": -1.720937728881836,
2183
+ "rewards/margins": 1.5882391929626465,
2184
+ "rewards/rejected": -3.3091769218444824,
2185
+ "step": 1450
2186
+ },
2187
+ {
2188
+ "epoch": 0.9967145965780604,
2189
+ "grad_norm": 36.64240487573334,
2190
+ "learning_rate": 4.552352048558422e-09,
2191
+ "logits/chosen": -0.6393886804580688,
2192
+ "logits/rejected": -0.6115251183509827,
2193
+ "logps/chosen": -229.70652770996094,
2194
+ "logps/rejected": -268.06982421875,
2195
+ "loss": 0.3379,
2196
+ "rewards/accuracies": 0.8734375238418579,
2197
+ "rewards/chosen": -1.651149034500122,
2198
+ "rewards/margins": 1.8959904909133911,
2199
+ "rewards/rejected": -3.5471396446228027,
2200
+ "step": 1460
2201
+ },
2202
+ {
2203
+ "epoch": 1.0,
2204
+ "step": 1465,
2205
+ "total_flos": 161167907028992.0,
2206
+ "train_loss": 0.47723283336431094,
2207
+ "train_runtime": 14257.9418,
2208
+ "train_samples_per_second": 6.575,
2209
+ "train_steps_per_second": 0.103
2210
+ }
2211
+ ],
2212
+ "logging_steps": 10,
2213
+ "max_steps": 1465,
2214
+ "num_input_tokens_seen": 0,
2215
+ "num_train_epochs": 1,
2216
+ "save_steps": 500,
2217
+ "stateful_callbacks": {
2218
+ "TrainerControl": {
2219
+ "args": {
2220
+ "should_epoch_stop": false,
2221
+ "should_evaluate": false,
2222
+ "should_log": false,
2223
+ "should_save": true,
2224
+ "should_training_stop": true
2225
+ },
2226
+ "attributes": {}
2227
+ }
2228
+ },
2229
+ "total_flos": 161167907028992.0,
2230
+ "train_batch_size": 1,
2231
+ "trial_name": null,
2232
+ "trial_params": null
2233
+ }
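The block above closes the `trainer_state.json` added in this commit: `log_history` holds one entry per logging step (every 10 optimizer steps, per `logging_steps`) with the DPO loss, gradient norm, decaying learning rate, mean chosen/rejected log-probabilities, and the implicit DPO rewards. As a reader's aid, the sketch below shows how these reward fields are typically derived in a TRL-style DPO trainer; the function name and the `beta` value are illustrative assumptions, not values recovered from this repository.

```python
# Minimal sketch (not this repo's training code): how TRL-style DPO training
# typically derives the logged quantities from summed per-sequence log-probs.
# `beta` is an assumed illustrative value; the run's actual beta is not in this diff.
import torch
import torch.nn.functional as F

def dpo_stats(policy_chosen_logps, policy_rejected_logps,
              ref_chosen_logps, ref_rejected_logps, beta=0.1):
    # "rewards/chosen" and "rewards/rejected": beta-scaled log-ratio of the
    # policy model against the frozen reference model.
    chosen_rewards = beta * (policy_chosen_logps - ref_chosen_logps)
    rejected_rewards = beta * (policy_rejected_logps - ref_rejected_logps)
    margins = chosen_rewards - rejected_rewards          # "rewards/margins"
    loss = -F.logsigmoid(margins).mean()                 # sigmoid DPO loss
    # "rewards/accuracies": fraction of pairs where the chosen response
    # earns the higher implicit reward.
    accuracies = (chosen_rewards > rejected_rewards).float().mean()
    return loss, chosen_rewards.mean(), rejected_rewards.mean(), margins.mean(), accuracies
```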
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ea2bd242c66883bf1a6d7868ecc4770435b0899a9045a8f36216220dfb9483a2
3
+ size 7544
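The three lines above are only the Git LFS pointer for `training_args.bin` (spec version, object hash, size in bytes); the binary itself is fetched with `git lfs pull`. In a standard `transformers` Trainer run this file holds the pickled training arguments, so it can usually be inspected as sketched below (a sketch, assuming the real file has been fetched; `weights_only=False` is needed because recent PyTorch versions default to weights-only unpickling).

```python
# Sketch: inspect the serialized training arguments after `git lfs pull`.
# Assumes training_args.bin is the usual pickled TrainingArguments-style object.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. "TrainingArguments" or a subclass
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```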
training_loss.png ADDED
training_rewards_accuracies.png ADDED
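`training_loss.png` and `training_rewards_accuracies.png` are rendered plots of the curves already present in `log_history`. They can be reproduced from `trainer_state.json` with a short script along these lines (a sketch, assuming the JSON sits in the working directory and matplotlib is installed; the output filename is arbitrary):

```python
# Sketch: regenerate the loss and reward-accuracy curves from trainer_state.json.
import json
import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step logging entries (the final summary entry has no "loss" key).
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(steps, [e["loss"] for e in logs])
ax1.set(xlabel="step", ylabel="loss", title="training loss")
ax2.plot(steps, [e["rewards/accuracies"] for e in logs])
ax2.set(xlabel="step", ylabel="rewards/accuracies", title="reward accuracies")
fig.tight_layout()
fig.savefig("training_curves.png")
```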