Training in progress, step 10
- adapter_config.json +5 -5
- adapter_model.safetensors +2 -2
- runs/Jan29_09-53-27_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738144438.ip-172-31-10-206.us-east-2.compute.internal.65359.0 +3 -0
- runs/Jan29_09-56-30_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738144602.ip-172-31-10-206.us-east-2.compute.internal.61470.0 +3 -0
- runs/Jan29_10-17-24_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738145853.ip-172-31-10-206.us-east-2.compute.internal.76151.0 +3 -0
- special_tokens_map.json +15 -19
- tokenizer.json +6 -19
- tokenizer_config.json +4 -24
- training_args.bin +1 -1
adapter_config.json
CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "q_proj",
-    "v_proj",
     "gate_proj",
-    "up_proj",
     "o_proj",
-    "
+    "v_proj",
+    "down_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
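The new adapter_config.json targets all seven Llama/Mistral-style projection matrices. As a point of reference only (not part of this commit), a minimal sketch of a peft LoraConfig that would produce this target_modules list; the rank and alpha values are placeholders:

```python
# Minimal sketch (assumption): a peft LoraConfig matching the new target_modules.
# r and lora_alpha are placeholders, not values taken from this commit.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                        # placeholder rank
    lora_alpha=32,               # placeholder scaling factor
    target_modules=[
        "gate_proj", "o_proj", "v_proj",
        "down_proj", "q_proj", "up_proj", "k_proj",
    ],
    task_type="CAUSAL_LM",       # as in adapter_config.json
    use_rslora=False,            # as in adapter_config.json
)
```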
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b451a42491549fdf165ed94ed3795bd0b119cd7632dbcb4903480e06417a8986
+size 2684416208
runs/Jan29_09-53-27_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738144438.ip-172-31-10-206.us-east-2.compute.internal.65359.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78220014a1d9f36dc8b4375d465b9db3d93caea298d4e5f6387cd0e1e330338d
+size 4184
runs/Jan29_09-56-30_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738144602.ip-172-31-10-206.us-east-2.compute.internal.61470.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2282c663b478c94d33828b3a0cfc91e6fdb67f852b84e3a1f9491cad72ec1f1e
+size 9990
runs/Jan29_10-17-24_ip-172-31-10-206.us-east-2.compute.internal/events.out.tfevents.1738145853.ip-172-31-10-206.us-east-2.compute.internal.76151.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d902fec00a16f1dfaba046756a938b169efdfaa708d65aa076abb9e2eeda001c
+size 5512
special_tokens_map.json
CHANGED
@@ -1,23 +1,19 @@
 {
-  "
-
-
-
-
-
-
-
-
-
-
-
-
-
-  ],
-  "bos_token": "<|im_start|>",
-  "eos_token": "<|im_end|>",
-  "pad_token": "<|im_end|>",
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<unk>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
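The special-tokens map switches from the ChatML pair (<|im_start|>/<|im_end|>) back to the base tokenizer's <s>/</s>, with <unk> as the padding token. As an illustration only, a minimal sketch of how that pad-token choice is typically set through transformers; the repo path is hypothetical:

```python
# Minimal sketch (assumption): reproducing the new pad-token setup in transformers.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")  # hypothetical path
tokenizer.pad_token = tokenizer.unk_token  # "<unk>", as in special_tokens_map.json
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token)  # <s> </s> <unk>
```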
tokenizer.json
CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation":
+  "truncation": {
+    "direction": "Right",
+    "max_length": 2048,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -6941,24 +6946,6 @@
       "rstrip": false,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 32768,
-      "content": "<|im_start|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32769,
-      "content": "<|im_end|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": {
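The new truncation block in tokenizer.json caps sequences at 2048 tokens. For reference only, a minimal sketch of the equivalent runtime call in the tokenizers library; the file path is assumed:

```python
# Minimal sketch (assumption): equivalent of the "truncation" block added to tokenizer.json.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(
    max_length=2048,           # "max_length": 2048
    strategy="longest_first",  # "strategy": "LongestFirst"
    stride=0,                  # "stride": 0
    direction="right",         # "direction": "Right"
)
```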
tokenizer_config.json
CHANGED
@@ -6170,35 +6170,15 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32768": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32769": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "
-  "
-  "<|im_end|>"
-  ],
-  "bos_token": "<|im_start|>",
-  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "bos_token": "<s>",
+  "chat_template": "{%- if messages[0][\"role\"] == \"system\" %}\n    {%- set system_message = messages[0][\"content\"] %}\n    {%- set loop_messages = messages[1:] %}\n{%- else %}\n    {%- set loop_messages = messages %}\n{%- endif %}\n{%- if not tools is defined %}\n    {%- set tools = none %}\n{%- endif %}\n{%- set user_messages = loop_messages | selectattr(\"role\", \"equalto\", \"user\") | list %}\n\n{#- This block checks for alternating user/assistant messages, skipping tool calling messages #}\n{%- set ns = namespace() %}\n{%- set ns.index = 0 %}\n{%- for message in loop_messages %}\n    {%- if not (message.role == \"tool\" or message.role == \"tool_results\" or (message.tool_calls is defined and message.tool_calls is not none)) %}\n        {%- if (message[\"role\"] == \"user\") != (ns.index % 2 == 0) %}\n            {{- raise_exception(\"After the optional system message, conversation roles must alternate user/assistant/user/assistant/...\") }}\n        {%- endif %}\n        {%- set ns.index = ns.index + 1 %}\n    {%- endif %}\n{%- endfor %}\n\n{{- bos_token }}\n{%- for message in loop_messages %}\n    {%- if message[\"role\"] == \"user\" %}\n        {%- if tools is not none and (message == user_messages[-1]) %}\n            {{- \"[AVAILABLE_TOOLS] [\" }}\n            {%- for tool in tools %}\n                {%- set tool = tool.function %}\n                {{- '{\"type\": \"function\", \"function\": {' }}\n                {%- for key, val in tool.items() if key != \"return\" %}\n                    {%- if val is string %}\n                        {{- '\"' + key + '\": \"' + val + '\"' }}\n                    {%- else %}\n                        {{- '\"' + key + '\": ' + val|tojson }}\n                    {%- endif %}\n                    {%- if not loop.last %}\n                        {{- \", \" }}\n                    {%- endif %}\n                {%- endfor %}\n                {{- \"}}\" }}\n                {%- if not loop.last %}\n                    {{- \", \" }}\n                {%- else %}\n                    {{- \"]\" }}\n                {%- endif %}\n            {%- endfor %}\n            {{- \"[/AVAILABLE_TOOLS]\" }}\n        {%- endif %}\n        {%- if loop.last and system_message is defined %}\n            {{- \"[INST] \" + system_message + \"\\n\\n\" + message[\"content\"] + \"[/INST]\" }}\n        {%- else %}\n            {{- \"[INST] \" + message[\"content\"] + \"[/INST]\" }}\n        {%- endif %}\n    {%- elif message.tool_calls is defined and message.tool_calls is not none %}\n        {{- \"[TOOL_CALLS] [\" }}\n        {%- for tool_call in message.tool_calls %}\n            {%- set out = tool_call.function|tojson %}\n            {{- out[:-1] }}\n            {%- if not tool_call.id is defined or tool_call.id|length != 9 %}\n                {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n            {%- endif %}\n            {{- ', \"id\": \"' + tool_call.id + '\"}' }}\n            {%- if not loop.last %}\n                {{- \", \" }}\n            {%- else %}\n                {{- \"]\" + eos_token }}\n            {%- endif %}\n        {%- endfor %}\n    {%- elif message[\"role\"] == \"assistant\" %}\n        {{- \" \" + message[\"content\"]|trim + eos_token}}\n    {%- elif message[\"role\"] == \"tool_results\" or message[\"role\"] == \"tool\" %}\n        {%- if message.content is defined and message.content.content is defined %}\n            {%- set content = message.content.content %}\n        {%- else %}\n            {%- set content = message.content %}\n        {%- endif %}\n        {{- '[TOOL_RESULTS] {\"content\": ' + content|string + \", \" }}\n        {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %}\n            {{- raise_exception(\"Tool call IDs should be alphanumeric strings with length 9!\") }}\n        {%- endif %}\n        {{- '\"call_id\": \"' + message.tool_call_id + '\"}[/TOOL_RESULTS]' }}\n    {%- else %}\n        {{- raise_exception(\"Only user and assistant roles are supported, with the exception of an initial optional system message!\") }}\n    {%- endif %}\n{%- endfor %}\n",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "
+  "eos_token": "</s>",
   "legacy": false,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "
+  "pad_token": "<unk>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
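The ChatML chat_template is replaced by a Mistral-style [INST] ... [/INST] template, with bos/eos/pad switched to <s>, </s>, and <unk> to match. As an illustration only, a minimal sketch of applying the new template through transformers; the repo path and messages are hypothetical:

```python
# Minimal sketch (assumption): rendering a conversation with the new chat_template.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/repo")  # hypothetical path
messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi there."},
    {"role": "user", "content": "Summarize the tokenizer changes."},
]
text = tokenizer.apply_chat_template(messages, tokenize=False)
# Yields "<s>[INST] Hello![/INST] Hi there.</s>[INST] Summarize the tokenizer changes.[/INST]"
print(text)
```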
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:317d83bbbafd845ee730787d479fdbb37fa6cb48424889fe64902c4d56ffa35b
 size 4920