devesh-2002 committed on
Commit
98e32c3
·
verified ·
1 Parent(s): 90a12ff

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +1 -7
  2. tokenizer_config.json +1 -1
special_tokens_map.json CHANGED
@@ -13,13 +13,7 @@
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
- "pad_token": {
17
- "content": "<pad>",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
- "single_word": false
22
- },
23
  "unk_token": {
24
  "content": "<unk>",
25
  "lstrip": false,
 
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
+ "pad_token": "<eos>",
 
 
 
 
 
 
17
  "unk_token": {
18
  "content": "<unk>",
19
  "lstrip": false,
tokenizer_config.json CHANGED
@@ -1999,7 +1999,7 @@
1999
  "clean_up_tokenization_spaces": false,
2000
  "eos_token": "<eos>",
2001
  "model_max_length": 1000000000000000019884624838656,
2002
- "pad_token": "<pad>",
2003
  "sp_model_kwargs": {},
2004
  "spaces_between_special_tokens": false,
2005
  "tokenizer_class": "GemmaTokenizer",
 
1999
  "clean_up_tokenization_spaces": false,
2000
  "eos_token": "<eos>",
2001
  "model_max_length": 1000000000000000019884624838656,
2002
+ "pad_token": "<eos>",
2003
  "sp_model_kwargs": {},
2004
  "spaces_between_special_tokens": false,
2005
  "tokenizer_class": "GemmaTokenizer",