Upload tokenizer files (vocab, config, README)
tokenizer.json CHANGED: +6 -6
@@ -52,12 +52,12 @@
     "type": "WordPiece",
     "unk_token": "<unk>"
   },
-  "special_tokens": [
-    0,
-    1,
-    2,
-    3
-  ],
+  "special_tokens": {
+    "pad_token": 0,
+    "unk_token": 1,
+    "bos_token": 2,
+    "eos_token": 3
+  },
   "model": {
     "type": "WordLevel",
     "vocab": {
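In short, the hunk appears to swap a bare array of special-token IDs for a mapping keyed by role, so each ID is self-describing rather than positional. A minimal sketch of reading the new mapping, assuming tokenizer.json is loaded as plain JSON and that "special_tokens" sits at the top level as the hunk suggests:

import json

# Minimal sketch: read the named special-token IDs from this commit.
# Assumes tokenizer.json is in the working directory and that
# "special_tokens" is a top-level key; both are assumptions beyond
# what the diff itself shows.
with open("tokenizer.json", encoding="utf-8") as f:
    config = json.load(f)

special = config["special_tokens"]
print(special["pad_token"])  # 0
print(special["unk_token"])  # 1
print(special["bos_token"])  # 2
print(special["eos_token"])  # 3

Keying the IDs by name means downstream code no longer has to rely on the order of a plain list to know which ID plays which role.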