Upload tokenizer
- special_tokens_map.json +21 -3
- tokenizer.json +0 -0
- tokenizer_config.json +6 -1
special_tokens_map.json CHANGED
@@ -8,7 +8,25 @@
       "single_word": false
     }
   ],
-  "eos_token": "</s>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
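
The dict form above mirrors the serialized fields of an AddedToken (content, lstrip, normalized, rstrip, single_word). As a rough illustration only, not the actual upload workflow behind this commit, here is a minimal sketch of how registering specials as AddedToken objects and re-saving produces a map in this shape; the base checkpoint and output path are placeholder assumptions:

```python
# Minimal sketch (not the actual upload script): special tokens registered
# as AddedToken objects are serialized as dicts in special_tokens_map.json.
# "t5-base" and "./tokenizer" are placeholder assumptions.
from transformers import AutoTokenizer
from tokenizers import AddedToken

tok = AutoTokenizer.from_pretrained("t5-base")  # placeholder checkpoint

flags = dict(lstrip=False, rstrip=False, normalized=False, single_word=False)
tok.add_special_tokens({
    "eos_token": AddedToken("</s>", **flags),
    "pad_token": AddedToken("<pad>", **flags),
    "unk_token": AddedToken("<unk>", **flags),
})

# save_pretrained writes special_tokens_map.json and tokenizer_config.json,
# plus tokenizer.json for fast tokenizers.
tok.save_pretrained("./tokenizer")
```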
tokenizer.json CHANGED
The diff for this file is too large to render; see the raw diff.
tokenizer_config.json CHANGED
@@ -855,8 +855,13 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
-  "
+  "max_length": 512,
+  "model_max_length": 512,
   "pad_token": "<pad>",
+  "sp_model_kwargs": {},
+  "stride": 0,
   "tokenizer_class": "T5Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
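
For context, the added keys are standard tokenizer settings that are picked up when the tokenizer is loaded back. A small sketch of what the new values mean in practice, assuming the files are saved under a placeholder local path:

```python
# Sketch of how the new tokenizer_config.json keys behave at load time.
# "./tokenizer" is a placeholder assumption for wherever the files live.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer")

print(tok.model_max_length)  # 512, from "model_max_length"
print(tok.truncation_side)   # "right", from "truncation_side"

# With truncation enabled, encodings are capped at model_max_length,
# dropping overflow tokens from the right side.
enc = tok("some very long input " * 200, truncation=True)
assert len(enc["input_ids"]) <= 512
```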