whizzzzkid committed
Commit 4ab5b46 · verified · 1 parent: 312add3

Upload tokenizer

added_tokens.json ADDED
@@ -0,0 +1,5 @@
+{
+  "</s>": 100290,
+  "<s>": 100289,
+  "<unk>": 100291
+}
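
The new file pins three Llama-style special tokens to fixed ids just past the base vocabulary. A minimal sanity check with transformers (the repo id below is a placeholder assumption, not taken from this commit):

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model repo.
tok = AutoTokenizer.from_pretrained("whizzzzkid/<repo>")

# Ids taken directly from added_tokens.json above.
assert tok.convert_tokens_to_ids("<s>") == 100289
assert tok.convert_tokens_to_ids("</s>") == 100290
assert tok.convert_tokens_to_ids("<unk>") == 100291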
special_tokens_map.json CHANGED
@@ -35,13 +35,20 @@
     "<|extra0|>"
   ],
   "bos_token": {
-    "content": "<|endoftext|>",
+    "content": "<s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
     "normalized": false,
@@ -49,7 +56,7 @@
     "single_word": false
   },
   "unk_token": {
-    "content": "<|endoftext|>",
+    "content": "<unk>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 2048,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -299,6 +304,33 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 100289,
+      "content": "<s>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 100290,
+      "content": "</s>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 100291,
+      "content": "<unk>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": null,
tokenizer_config.json CHANGED
@@ -264,6 +264,30 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "100289": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100290": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100291": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [
@@ -301,10 +325,16 @@
     "<|reg7|>",
     "<|extra0|>"
   ],
-  "bos_token": "<|endoftext|>",
+  "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
-  "model_max_length": 1000000000000000019884624838656,
+  "eos_token": "</s>",
+  "max_length": 2048,
+  "model_max_length": 2048,
+  "pad_token": "<|endoftext|>",
+  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
+  "unk_token": "<unk>"
 }
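
The added chat_template is the widely used Zephyr-style template: each turn is wrapped in an <|system|>/<|user|>/<|assistant|> header and terminated with eos_token (now </s>), and a bare <|assistant|> header is appended when a generation prompt is requested. A usage sketch (placeholder repo id again):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("whizzzzkid/<repo>")  # placeholder repo id
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
# Renders the template above; the output ends with '<|assistant|>' so the
# model knows to generate the next turn.
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))

Note also that model_max_length drops from the transformers "unset" sentinel (1000000000000000019884624838656) to an explicit 2048, matching the truncation block serialized into tokenizer.json.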