Devy1 committed on
Commit
dde3acc
Β·
verified Β·
1 Parent(s): a8f21f8

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -1,5 +1,9 @@
1
  {
2
  "additional_special_tokens": [
 
 
 
 
3
  "▁<PRE>",
4
  "▁<MID>",
5
  "▁<SUF>",
 
1
  {
2
  "additional_special_tokens": [
3
+ "▁<PRE>",
4
+ "▁<MID>",
5
+ "▁<SUF>",
6
+ "▁<EOT>",
7
  "▁<PRE>",
8
  "▁<MID>",
9
  "▁<SUF>",
tokenizer.json CHANGED
@@ -170,6 +170,7 @@
170
  "end_of_word_suffix": null,
171
  "fuse_unk": true,
172
  "byte_fallback": true,
 
173
  "vocab": {
174
  "<unk>": 0,
175
  "<s>": 1,
 
170
  "end_of_word_suffix": null,
171
  "fuse_unk": true,
172
  "byte_fallback": true,
173
+ "ignore_merges": false,
174
  "vocab": {
175
  "<unk>": 0,
176
  "<s>": 1,
tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:45ccb9c8b6b561889acea59191d66986d314e7cbd6a78abc6e49b139ca91c1e6
3
+ size 500058
tokenizer_config.json CHANGED
@@ -60,6 +60,10 @@
60
  }
61
  },
62
  "additional_special_tokens": [
 
 
 
 
63
  "▁<PRE>",
64
  "▁<MID>",
65
  "▁<SUF>",
@@ -67,6 +71,7 @@
67
  ],
68
  "bos_token": "<s>",
69
  "clean_up_tokenization_spaces": false,
 
70
  "eos_token": "</s>",
71
  "eot_token": "▁<EOT>",
72
  "fill_token": "<FILL_ME>",
 
60
  }
61
  },
62
  "additional_special_tokens": [
63
+ "▁<PRE>",
64
+ "▁<MID>",
65
+ "▁<SUF>",
66
+ "▁<EOT>",
67
  "▁<PRE>",
68
  "▁<MID>",
69
  "▁<SUF>",
 
71
  ],
72
  "bos_token": "<s>",
73
  "clean_up_tokenization_spaces": false,
74
+ "device_map": "auto",
75
  "eos_token": "</s>",
76
  "eot_token": "▁<EOT>",
77
  "fill_token": "<FILL_ME>",