Fix tokenizers (from @unsloth's clean versions)
Files changed:

- added_tokens.json +2 -1
- config.json +4 -3
- generation_config.json +6 -2
- model-00002-of-00002.safetensors +2 -2
- special_tokens_map.json +3 -8
- tokenizer_config.json +11 -3
added_tokens.json

```diff
@@ -8,5 +8,6 @@
   "<|tool_call|>": 200025,
   "<|tool_response|>": 200027,
   "<|tool|>": 200023,
-  "<|user|>": 200021
+  "<|user|>": 200021,
+  "<|PAD▁TOKEN|>": 200029
 }
```
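A quick sanity check that the appended pad token round-trips through the tokenizer. This is a minimal sketch; the repo id below is an assumption, so point it at wherever this fixed checkpoint actually lives.

```python
# Minimal sketch: confirm the appended pad token resolves to id 200029.
# The repo id is an assumption, not confirmed by this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("unsloth/Phi-4-mini-instruct")

print(tokenizer.convert_tokens_to_ids("<|PAD▁TOKEN|>"))  # expected: 200029
print(tokenizer.convert_tokens_to_ids("<|user|>"))       # expected: 200021
```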
config.json

```diff
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "Phi-4-mini-instruct",
+  "_name_or_path": "microsoft/Phi-4-mini-instruct",
   "architectures": [
     "Phi3ForCausalLM"
   ],
@@ -27,7 +27,7 @@
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "original_max_position_embeddings": 4096,
-  "pad_token_id":
+  "pad_token_id": 200029,
   "partial_rotary_factor": 0.75,
   "resid_pdrop": 0.0,
   "rms_norm_eps": 1e-05,
@@ -138,7 +138,8 @@
   "sliding_window": 262144,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.49.0",
+  "unsloth_fixed": true,
   "use_cache": true,
   "vocab_size": 200064
 }
```
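With pad_token_id pinned in config.json, padding no longer depends on a value set at load time. A minimal check, same assumed repo id as above:

```python
# Minimal sketch: config.json should now carry the pad token id explicitly.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("unsloth/Phi-4-mini-instruct")  # assumed repo id

print(config.pad_token_id)  # expected: 200029
print(config.vocab_size)    # 200064 (unchanged)
```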
generation_config.json

```diff
@@ -1,7 +1,11 @@
 {
   "_from_model_config": true,
   "bos_token_id": 199999,
-  "eos_token_id":
+  "eos_token_id": [
+    200020,
+    199999
+  ],
+  "max_length": 131072,
   "pad_token_id": 200029,
-  "transformers_version": "4.
+  "transformers_version": "4.49.0"
 }
```
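The eos_token_id list means generation stops on either 200020 (<|end|>) or 199999 (<|endoftext|>). A minimal sketch to confirm, repo id assumed:

```python
# Minimal sketch: generate() now stops on either of the two eos ids.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("unsloth/Phi-4-mini-instruct")  # assumed repo id

print(gen_config.eos_token_id)  # expected: [200020, 199999]
print(gen_config.pad_token_id)  # expected: 200029
```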
model-00002-of-00002.safetensors

```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:4b95024d663f331ee51097cf857cafc7159a5b478fc8d35195cb3312b95f101c
+size 2415521792
```
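The re-uploaded shard can be checked against the new LFS pointer. A minimal sketch, assuming the file has already been downloaded to the working directory:

```python
# Minimal sketch: hash a downloaded shard and compare with the LFS pointer.
import hashlib
import os

path = "model-00002-of-00002.safetensors"  # assumed local download
expected_sha = "4b95024d663f331ee51097cf857cafc7159a5b478fc8d35195cb3312b95f101c"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest() == expected_sha)        # expected: True
print(os.path.getsize(path) == 2415521792)  # expected: True
```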
special_tokens_map.json

```diff
@@ -7,17 +7,12 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|
+    "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
-  "pad_token":
-
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|PAD▁TOKEN|>",
+  "unk_token": "�"
 }
```
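After this change the pad/eos/unk entries load straight into the tokenizer attributes. A quick check, repo id assumed as before:

```python
# Minimal sketch: the special-token attributes should now be populated.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("unsloth/Phi-4-mini-instruct")  # assumed repo id

print(tokenizer.pad_token)  # expected: <|PAD▁TOKEN|>
print(tokenizer.eos_token)  # expected: <|endoftext|>
print(tokenizer.unk_token)  # expected: �
```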
tokenizer_config.json

```diff
@@ -3,6 +3,14 @@
   "add_eos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
+    "3251": {
+      "content": "�",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
     "199999": {
       "content": "<|endoftext|>",
       "lstrip": false,
@@ -100,7 +108,7 @@
       "special": true
     },
     "200029": {
-      "content": "
+      "content": "<|PAD▁TOKEN|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -111,10 +119,10 @@
   "bos_token": "<|endoftext|>",
   "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and 'tools' in message and message['tools'] is not none %}{{ '<|' + message['role'] + '|>' + message['content'] + '<|tool|>' + message['tools'] + '<|/tool|>' + '<|end|>' }}{% else %}{{ '<|' + message['role'] + '|>' + message['content'] + '<|end|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|
+  "eos_token": "<|endoftext|>",
   "extra_special_tokens": {},
   "model_max_length": 131072,
-  "pad_token": "
+  "pad_token": "<|PAD▁TOKEN|>",
   "padding_side": "left",
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "�"
```
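The chat_template above renders role-tagged turns, each terminated by <|end|>. A minimal sketch of how it formats a conversation, repo id assumed:

```python
# Minimal sketch: render the chat template from tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("unsloth/Phi-4-mini-instruct")  # assumed repo id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# expected: <|system|>You are a helpful assistant.<|end|><|user|>Hello!<|end|><|assistant|>
```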