colorlessideas committed
Commit b7902f5 · verified · Parent: ba275ff

Upload tokenizer
Files changed (3):
  1. added_tokens.json      +2 -2
  2. tokenizer_config.json  +4 -5
  3. vocab.json             +30 -43
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 45,
-  "<s>": 44
+  "</s>": 32,
+  "<s>": 31
 }
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "42": {
+    "29": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "43": {
+    "30": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "44": {
+    "31": {
       "content": "<s>",
       "lstrip": false,
       "normalized": false,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "45": {
+    "32": {
       "content": "</s>",
       "lstrip": false,
       "normalized": false,
@@ -40,7 +40,6 @@
   "extra_special_tokens": {},
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2Processor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
vocab.json CHANGED
@@ -1,46 +1,33 @@
 {
-  "&": 1,
-  "0": 2,
-  "1": 3,
-  "2": 4,
-  "7": 5,
-  "8": 6,
-  "[PAD]": 43,
-  "[UNK]": 42,
-  "\\": 7,
-  "a": 8,
-  "b": 9,
-  "c": 10,
-  "d": 11,
-  "e": 12,
-  "f": 13,
-  "g": 14,
-  "h": 15,
-  "i": 16,
-  "j": 17,
-  "k": 18,
-  "l": 19,
-  "m": 20,
-  "n": 21,
-  "o": 22,
-  "p": 23,
-  "q": 24,
-  "r": 25,
-  "s": 26,
-  "t": 27,
-  "u": 28,
-  "v": 29,
-  "w": 30,
-  "x": 31,
-  "y": 32,
-  "z": 33,
+  "'": 1,
+  "[PAD]": 30,
+  "[UNK]": 29,
+  "a": 2,
+  "b": 3,
+  "c": 4,
+  "d": 5,
+  "e": 6,
+  "f": 7,
+  "g": 8,
+  "h": 9,
+  "i": 10,
+  "j": 11,
+  "k": 12,
+  "l": 13,
+  "m": 14,
+  "n": 15,
+  "o": 16,
+  "p": 17,
+  "q": 18,
+  "r": 19,
+  "s": 20,
+  "t": 21,
+  "u": 22,
+  "v": 23,
+  "w": 24,
+  "x": 25,
+  "y": 26,
+  "z": 27,
   "|": 0,
-  "¿": 34,
-  "á": 35,
-  "é": 36,
-  "ì": 37,
-  "í": 38,
-  "ñ": 39,
-  "ó": 40,
-  "ú": 41
+  "ñ": 28
 }
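For context, a minimal sketch of loading this tokenizer and checking that its ids match the files above. The repo name is not shown on this page, so "colorlessideas/MODEL_REPO" is a placeholder assumption; the expected ids come straight from the new vocab.json (31 entries, ids 0-30) and added_tokens.json (<s>=31, </s>=32).

# Minimal sketch, not part of the commit. "colorlessideas/MODEL_REPO" is a
# hypothetical repo id -- substitute the actual model repository.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("colorlessideas/MODEL_REPO")

# vocab.json holds ids 0-30; the two added tokens follow at 31 and 32.
assert tokenizer.convert_tokens_to_ids("[UNK]") == 29
assert tokenizer.convert_tokens_to_ids("[PAD]") == 30
assert tokenizer.convert_tokens_to_ids("<s>") == 31
assert tokenizer.convert_tokens_to_ids("</s>") == 32
print(len(tokenizer))  # 33 = 31 vocab entries + 2 added special tokens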