NeuraCraft committed on
Commit 5828935 · 1 Parent(s): 6d12e55

Upload tokenizer

Files changed (3)
  1. added_tokens.json +3 -3
  2. special_tokens_map.json +12 -12
  3. tokenizer_config.json +29 -29
added_tokens.json CHANGED
@@ -1,3 +1,3 @@
- {
-   "[PAD]": 50257
- }
+ {
+   "[PAD]": 50257
+ }
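
added_tokens.json lists tokens appended after the base vocabulary. A minimal sketch of how a [PAD] entry at id 50257 typically comes about, assuming the tokenizer started from stock GPT-2 (the uploader's actual steps may differ): GPT-2's original vocabulary covers ids 0-50256, so the first added token lands at 50257.

```python
# Sketch only; assumes a stock GPT-2 starting point.
from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
tokenizer.add_special_tokens({"pad_token": "[PAD]"})   # appends one new special token
print(tokenizer.convert_tokens_to_ids("[PAD]"))        # 50257 (base vocab is 0-50256)

# save_pretrained writes added_tokens.json, special_tokens_map.json and
# tokenizer_config.json alongside vocab.json / merges.txt.
tokenizer.save_pretrained("./gpt2-with-pad")            # hypothetical output directory
```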
special_tokens_map.json CHANGED
@@ -1,12 +1,12 @@
- {
-   "bos_token": "<|endoftext|>",
-   "eos_token": "<|endoftext|>",
-   "pad_token": {
-     "content": "[PAD]",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "unk_token": "<|endoftext|>"
- }
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": "<|endoftext|>"
+ }
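
In special_tokens_map.json, bos/eos/unk are stored as plain strings, while pad_token is the serialized form of an AddedToken carrying the lstrip/rstrip/normalized/single_word flags seen above. A brief sketch of how such an entry can be specified explicitly (the uploader may simply have relied on the defaults):

```python
from transformers import AddedToken, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
# Flags mirror the serialized pad_token entry in special_tokens_map.json.
pad = AddedToken("[PAD]", lstrip=False, rstrip=False, normalized=False, single_word=False)
tokenizer.add_special_tokens({"pad_token": pad})
print(tokenizer.special_tokens_map["pad_token"])  # "[PAD]"
```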
tokenizer_config.json CHANGED
@@ -1,29 +1,29 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "50256": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "50257": {
-       "content": "[PAD]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|endoftext|>",
-   "extra_special_tokens": {},
-   "model_max_length": 1024,
-   "pad_token": "[PAD]",
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 1024,
+   "pad_token": "[PAD]",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
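
A short usage sketch of the uploaded tokenizer config (the repo id below is a placeholder, not the actual repository name): because [PAD] sits at id 50257, one past GPT-2's original embedding table, a GPT-2 checkpoint needs its embedding matrix resized before that id can be used for padded batches.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("NeuraCraft/your-repo")  # hypothetical repo id
model = AutoModelForCausalLM.from_pretrained("gpt2")

# Add one embedding row so id 50257 ([PAD]) is addressable; len(tokenizer) == 50258.
model.resize_token_embeddings(len(tokenizer))

# padding=True works because pad_token is set in tokenizer_config.json.
batch = tokenizer(["short", "a somewhat longer input"], padding=True, return_tensors="pt")
out = model(**batch)
```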