Adjust number of reserved tokens to match the model #15
opened by dzhulgakov

tokenization_kimi.py (+1 -1)
```diff
@@ -103,7 +103,7 @@ class TikTokenTokenizer(PreTrainedTokenizer):
         self.special_tokens = {
             special_tokens_mapping.get(i, f"<|reserved_token_{i}|>"): i
             for i in range(
-                num_base_tokens, num_base_tokens + self.num_reserved_special_tokens
+                num_base_tokens, num_base_tokens + self.num_reserved_special_tokens
             )
         }
```
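For context, the comprehension in this hunk maps every ID between the end of the base BPE vocabulary and the end of the reserved range to a named special token where one is defined, falling back to a `<|reserved_token_{i}|>` placeholder. Below is a minimal runnable sketch of that pattern; the counts and token names are made-up assumptions for illustration, not the real Kimi K2 configuration:

```python
# Sketch of the reserved-token mapping built in the hunk above.
# All concrete values here are illustrative assumptions.
num_base_tokens = 100              # size of the base BPE vocabulary (assumed)
num_reserved_special_tokens = 4    # should match the model's embedding table (assumed)

# Some reserved IDs carry named special tokens; the rest get placeholders.
special_tokens_mapping = {         # assumed names, for illustration only
    100: "<|im_start|>",
    101: "<|im_end|>",
}

special_tokens = {
    special_tokens_mapping.get(i, f"<|reserved_token_{i}|>"): i
    for i in range(num_base_tokens, num_base_tokens + num_reserved_special_tokens)
}

print(special_tokens)
# {'<|im_start|>': 100, '<|im_end|>': 101,
#  '<|reserved_token_102|>': 102, '<|reserved_token_103|>': 103}
```

If `num_base_tokens + num_reserved_special_tokens` drifts from the model's `vocab_size`, the tokenizer can emit IDs past the embedding table, which is presumably the mismatch this PR's title refers to.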