damienbenveniste committed on
Commit
5d86c9e
·
verified ·
1 Parent(s): 74f4cee

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer.json +1 -2
  2. tokenizer_config.json +0 -11
tokenizer.json CHANGED
@@ -23,7 +23,7 @@
23
  "single_word": false,
24
  "lstrip": false,
25
  "rstrip": false,
26
- "normalized": true,
27
  "special": true
28
  }
29
  ],
@@ -54,7 +54,6 @@
54
  "end_of_word_suffix": "",
55
  "fuse_unk": false,
56
  "byte_fallback": false,
57
- "ignore_merges": false,
58
  "vocab": {
59
  "!": 0,
60
  "\"": 1,
 
23
  "single_word": false,
24
  "lstrip": false,
25
  "rstrip": false,
26
+ "normalized": false,
27
  "special": true
28
  }
29
  ],
 
54
  "end_of_word_suffix": "",
55
  "fuse_unk": false,
56
  "byte_fallback": false,
 
57
  "vocab": {
58
  "!": 0,
59
  "\"": 1,
tokenizer_config.json CHANGED
@@ -1,20 +1,9 @@
1
  {
2
  "add_prefix_space": false,
3
- "added_tokens_decoder": {
4
- "50256": {
5
- "content": "<|endoftext|>",
6
- "lstrip": false,
7
- "normalized": true,
8
- "rstrip": false,
9
- "single_word": false,
10
- "special": true
11
- }
12
- },
13
  "bos_token": "<|endoftext|>",
14
  "clean_up_tokenization_spaces": true,
15
  "eos_token": "<|endoftext|>",
16
  "model_max_length": 1024,
17
- "pad_token": "<|endoftext|>",
18
  "tokenizer_class": "GPT2Tokenizer",
19
  "unk_token": "<|endoftext|>"
20
  }
 
1
  {
2
  "add_prefix_space": false,
 
 
 
 
 
 
 
 
 
 
3
  "bos_token": "<|endoftext|>",
4
  "clean_up_tokenization_spaces": true,
5
  "eos_token": "<|endoftext|>",
6
  "model_max_length": 1024,
 
7
  "tokenizer_class": "GPT2Tokenizer",
8
  "unk_token": "<|endoftext|>"
9
  }