Upload tokenizer
- sentencepiece.bpe.model +2 -2
- tokenizer.json +10 -10
- tokenizer_config.json +6 -6
sentencepiece.bpe.model
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:640115634b6b559a15bce8fad333e3b0eb835fce07a253d78f22961c0a5c7864
+size 802696
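The pointer file itself stores only a hash and a byte count. A minimal verification sketch (plain Python; it assumes the real model file has already been pulled via `git lfs` into the working directory, since the pointer text alone would fail the check):

```python
import hashlib
import os

# Hypothetical local path to the LFS-resolved model file.
path = "sentencepiece.bpe.model"

# Values taken from the new pointer above.
expected_sha256 = "640115634b6b559a15bce8fad333e3b0eb835fce07a253d78f22961c0a5c7864"
expected_size = 802696

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest == expected_sha256, "sha256 mismatch"
print("local file matches the LFS pointer")
```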
tokenizer.json
CHANGED
@@ -13,8 +13,8 @@
       "special": true
     },
     {
-      "id":
-      "content": "
+      "id": 1,
+      "content": "<pad>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -22,8 +22,8 @@
       "special": true
     },
     {
-      "id":
-      "content": "
+      "id": 2,
+      "content": "</s>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -31,8 +31,8 @@
       "special": true
     },
     {
-      "id":
-      "content": "<
+      "id": 3,
+      "content": "<unk>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -162,10 +162,6 @@
       "<unk>",
       0.0
     ],
-    [
-      "<pad>",
-      0.0
-    ],
     [
       "▁the",
       -2.9545793533325195
@@ -128149,6 +128145,10 @@
     [
       "Aquila",
       -13.928366661071777
+    ],
+    [
+      "9.6",
+      -13.928522109985352
     ]
   ],
   "byte_fallback": false
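Net effect of the tokenizer.json changes: the special tokens are remapped to ids 1 (`<pad>`), 2 (`</s>`) and 3 (`<unk>`), the `<pad>` entry is removed from the vocabulary score list (it now lives in `added_tokens`), and a new token `9.6` is appended at the end. A minimal sketch (assuming the `tokenizers` package and this commit's `tokenizer.json` in the working directory) to confirm the remapped ids:

```python
from tokenizers import Tokenizer

# Load the fast-tokenizer definition edited in this commit.
tok = Tokenizer.from_file("tokenizer.json")

# Ids as set in the "added_tokens" section of the diff above.
for token, expected_id in [("<pad>", 1), ("</s>", 2), ("<unk>", 3)]:
    assert tok.token_to_id(token) == expected_id, f"unexpected id for {token}"

# "9.6" was appended to the vocabulary, so it should now resolve to an id.
print("9.6 ->", tok.token_to_id("9.6"))
```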
tokenizer_config.json
CHANGED
@@ -9,24 +9,24 @@
       "single_word": false,
       "special": true
     },
-    "
-    "content": "
+    "1": {
+      "content": "<pad>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "
-    "content": "
+    "2": {
+      "content": "</s>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "
-    "content": "<
+    "3": {
+      "content": "<unk>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
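tokenizer_config.json mirrors the same id remapping in its added-tokens table. A minimal end-to-end sketch (assuming `transformers` is installed and this repo is checked out locally; `"."` is a placeholder path, and the expected values assume the config also declares these strings as the pad/eos/unk tokens):

```python
from transformers import AutoTokenizer

# Load from the local checkout; swap "." for the repo id on the Hub if preferred.
tok = AutoTokenizer.from_pretrained(".")

# With ids 1/2/3 mapped to <pad>/</s>/<unk> in tokenizer_config.json,
# the resolved special-token ids should line up with tokenizer.json.
print(tok.pad_token, tok.pad_token_id)  # expected: <pad> 1
print(tok.eos_token, tok.eos_token_id)  # expected: </s> 2
print(tok.unk_token, tok.unk_token_id)  # expected: <unk> 3
```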