wsqstar committed
Commit 603c151 · verified · 1 Parent(s): dee3175

Upload tokenizer

Files changed (3)
  1. README.md +3 -3
  2. tokenizer.json +16 -2
  3. tokenizer_config.json +1 -2
README.md CHANGED
@@ -1,13 +1,13 @@
 ---
-library_name: transformers
 base_model: bert-base-chinese
-tags:
-- generated_from_trainer
+library_name: transformers
 metrics:
 - accuracy
 - precision
 - recall
 - f1
+tags:
+- generated_from_trainer
 model-index:
 - name: weibo-model-4tags
   results: []
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "[PAD]"
+  },
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -41,10 +41,9 @@
       "special": true
     }
   },
-  "clean_up_tokenization_spaces": false,
+  "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
   "do_lower_case": false,
-  "extra_special_tokens": {},
   "mask_token": "[MASK]",
   "model_max_length": 512,
   "pad_token": "[PAD]",