asahi417 committed
Commit 1df6407 · 1 Parent(s): 4b6ab15

add tokenizer

added_tokens.json CHANGED
@@ -1 +1,3 @@
-{"<hl>": 50265}
+{
+  "<hl>": 50265
+}
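This hunk only pretty-prints added_tokens.json; the mapping of the highlight token <hl> to vocabulary id 50265 is unchanged. A minimal sketch to confirm the id after loading (the repo id "lmqg/bart-base-tweetqa-qag" is an assumption inferred from the name_or_path value in tokenizer_config.json below):

    # Assumed repo id; adjust to wherever this checkout actually lives.
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("lmqg/bart-base-tweetqa-qag")
    print(tokenizer.convert_tokens_to_ids("<hl>"))  # expected: 50265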
special_tokens_map.json CHANGED
@@ -1 +1,18 @@
-{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}, "additional_special_tokens": ["<hl>"]}
+{
+  "additional_special_tokens": [
+    "<hl>"
+  ],
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
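special_tokens_map.json is likewise reserialized with sorted, indented keys; no tokens are added or removed. Because <hl> is registered under additional_special_tokens, the tokenizer treats it as an atomic piece rather than splitting it through BPE. A quick check under the same assumed repo id:

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("lmqg/bart-base-tweetqa-qag")  # assumed repo id
    pieces = tokenizer.tokenize("question generation: <hl> New York <hl> is a city.")
    print(pieces)  # "<hl>" should appear intact among the pieces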
tokenizer.json CHANGED
@@ -62,7 +62,8 @@
   "pre_tokenizer": {
     "type": "ByteLevel",
     "add_prefix_space": false,
-    "trim_offsets": true
+    "trim_offsets": true,
+    "use_regex": true
   },
   "post_processor": {
     "type": "RobertaProcessing",
@@ -80,7 +81,8 @@
   "decoder": {
     "type": "ByteLevel",
     "add_prefix_space": true,
-    "trim_offsets": true
+    "trim_offsets": true,
+    "use_regex": true
   },
   "model": {
     "type": "BPE",
tokenizer_config.json CHANGED
@@ -1 +1,16 @@
-{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 1024, "name_or_path": "lmqg_output/bart_base_tweetqa/model_zwjvpy/epoch_10", "special_tokens_map_file": "lmqg_output/bart_base_tweetqa/model_zwjvpy/epoch_10/special_tokens_map.json", "tokenizer_class": "BartTokenizer"}
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1024,
+  "name_or_path": "bart-base-tweetqa-qag",
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "special_tokens_map_file": "lmqg_output/bart_base_tweetqa/model_zwjvpy/epoch_10/special_tokens_map.json",
+  "tokenizer_class": "BartTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
+}
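Beyond the reserialization, tokenizer_config.json updates name_or_path from the local training directory (lmqg_output/bart_base_tweetqa/model_zwjvpy/epoch_10) to the model name bart-base-tweetqa-qag; every tokenizer setting, including model_max_length of 1024, is unchanged. Loading it under the same assumed repo id:

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("lmqg/bart-base-tweetqa-qag")  # assumed repo id
    print(type(tokenizer).__name__)    # BartTokenizerFast, the fast counterpart of BartTokenizer
    print(tokenizer.model_max_length)  # 1024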