Diwank Singh committed on
Commit fcca1db · 1 Parent(s): 0e6926a

Signed-off-by: Diwank Singh <[email protected]>

Files changed (39)
  1. 1_Pooling/config.json +7 -0
  2. 2_Asym/139759749869296_Dense/config.json +1 -0
  3. 2_Asym/139759749869296_Dense/pytorch_model.bin +3 -0
  4. 2_Asym/139759749870880_Dense/config.json +1 -0
  5. 2_Asym/139759749870880_Dense/pytorch_model.bin +3 -0
  6. 2_Asym/139759749873328_Dense/config.json +1 -0
  7. 2_Asym/139759749873328_Dense/pytorch_model.bin +3 -0
  8. 2_Asym/139762386917776_Dense/config.json +1 -0
  9. 2_Asym/139762386917776_Dense/pytorch_model.bin +3 -0
  10. 2_Asym/139771065711008_Dense/config.json +1 -0
  11. 2_Asym/139771065711008_Dense/pytorch_model.bin +3 -0
  12. 2_Asym/139771065971808_Dense/config.json +1 -0
  13. 2_Asym/139771065971808_Dense/pytorch_model.bin +3 -0
  14. 2_Asym/139911174117776_Dense/config.json +1 -0
  15. 2_Asym/139911174117776_Dense/pytorch_model.bin +3 -0
  16. 2_Asym/139911174118736_Dense/config.json +1 -0
  17. 2_Asym/139911174118736_Dense/pytorch_model.bin +3 -0
  18. 2_Asym/139911174119600_Dense/config.json +1 -0
  19. 2_Asym/139911174119600_Dense/pytorch_model.bin +3 -0
  20. 2_Asym/139911174122624_Dense/config.json +1 -0
  21. 2_Asym/139911174122624_Dense/pytorch_model.bin +3 -0
  22. 2_Asym/139911174123152_Dense/config.json +1 -0
  23. 2_Asym/139911174123152_Dense/pytorch_model.bin +3 -0
  24. 2_Asym/139913809775248_Dense/config.json +1 -0
  25. 2_Asym/139913809775248_Dense/pytorch_model.bin +3 -0
  26. 2_Asym/config.json +29 -0
  27. README.md +96 -1
  28. added_tokens.json +7 -0
  29. config.json +32 -0
  30. config_sentence_transformers.json +7 -0
  31. eval/.ipynb_checkpoints/similarity_evaluation_results-checkpoint.csv +2 -0
  32. eval/similarity_evaluation_results.csv +11 -0
  33. modules.json +20 -0
  34. pytorch_model.bin +3 -0
  35. sentence_bert_config.json +4 -0
  36. special_tokens_map.json +7 -0
  37. tokenizer.json +0 -0
  38. tokenizer_config.json +58 -0
  39. vocab.txt +0 -0
1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 1024,
+ "pooling_mode_cls_token": true,
+ "pooling_mode_mean_tokens": false,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
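
This pooling module keeps only the `[CLS]` token representation of the 1024-dimensional encoder output. A minimal sketch of what that setting means, using the base encoder named in `config.json` (`BAAI/bge-large-en-v1.5`) purely as an illustration rather than this repository's fine-tuned weights:

```python
import torch
from transformers import AutoModel, AutoTokenizer

# Illustrative base encoder only; this repo's fine-tuned weights ship in pytorch_model.bin.
tokenizer = AutoTokenizer.from_pretrained("BAAI/bge-large-en-v1.5")
encoder = AutoModel.from_pretrained("BAAI/bge-large-en-v1.5")

batch = tokenizer(["an example sentence"], return_tensors="pt")
with torch.no_grad():
    hidden = encoder(**batch).last_hidden_state  # (batch, seq_len, 1024)

# pooling_mode_cls_token=true: the sentence embedding is the first token's hidden state.
cls_embedding = hidden[:, 0]  # (batch, 1024) == word_embedding_dimension
```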
2_Asym/139759749869296_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 1024, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139759749869296_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81d2629d3ce8d0a4fc67587fb0faf12db9d5966fdfbc263caa061932c76823b8
+ size 8394364
2_Asym/139759749870880_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139759749870880_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd6694dc33f31d11d88a87da677f9755436b5fb522f73bec6b66ef0b94d097f2
+ size 16787068
2_Asym/139759749873328_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 1024, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139759749873328_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:445a80966eabcd388789b825ae76c8b871d651582f103b57e968d2dd05eeca87
+ size 8398460
2_Asym/139762386917776_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 1024, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139762386917776_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4beb5d580088294aaae0b2b32a87c55c18869c73c28d2f7de3ba029876a0abec
+ size 8394364
2_Asym/139771065711008_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139771065711008_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:979e712b7a332597434772b6e4841f72efc961e9057506693b8f5c740ca3771d
+ size 16787068
2_Asym/139771065971808_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 1024, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139771065971808_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e81a915b1fa1aa031f582c7c2f4a8dad97e47bac27ff43701074266fe6bdf6d3
+ size 8398460
2_Asym/139911174117776_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 1024, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139911174117776_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d802959f3b7009b25b6cd0ddd862c11d5aa3b8e34a49ca7519e6f580e965f9b7
+ size 8394364
2_Asym/139911174118736_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 1024, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139911174118736_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff9a818c30f6cf3246002f7d48bbf88cb932f499a033b78a31fc2d8da69b8148
+ size 8398460
2_Asym/139911174119600_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 1024, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139911174119600_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8618c4bf5eb623582cde02743035f0870c003bff36b753340ffe74b76e388e1
+ size 8398460
2_Asym/139911174122624_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139911174122624_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:060a7ad01cddc99f71e6bfe38f1269ec8374114eafcbd67c59d54a3bdf901f93
+ size 16787068
2_Asym/139911174123152_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 1024, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139911174123152_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a9f8fd077b6796d110550914ce3390950e1909c56b5d134b7fe75a06f960bc4
+ size 8394364
2_Asym/139913809775248_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 2048, "out_features": 2048, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
2_Asym/139913809775248_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a196cb53a82bd9a4a447c40314840c4917477e88f1b65e6b0f6872e0ad18744
+ size 16787068
2_Asym/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "types": {
+ "139759749873328_Dense": "sentence_transformers.models.Dense",
+ "139771065711008_Dense": "sentence_transformers.models.Dense",
+ "139762386917776_Dense": "sentence_transformers.models.Dense",
+ "139762386917200_Normalize": "sentence_transformers.models.Normalize",
+ "139771065971808_Dense": "sentence_transformers.models.Dense",
+ "139759749870880_Dense": "sentence_transformers.models.Dense",
+ "139759749869296_Dense": "sentence_transformers.models.Dense",
+ "139759749864928_Normalize": "sentence_transformers.models.Normalize"
+ },
+ "structure": {
+ "dialog": [
+ "139759749873328_Dense",
+ "139771065711008_Dense",
+ "139762386917776_Dense",
+ "139762386917200_Normalize"
+ ],
+ "fact": [
+ "139771065971808_Dense",
+ "139759749870880_Dense",
+ "139759749869296_Dense",
+ "139759749864928_Normalize"
+ ]
+ },
+ "parameters": {
+ "allow_empty_key": false
+ }
+ }
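
The `structure` map above defines two projection branches, `dialog` and `fact`, each ending in a Dense layer down to 1024 features plus `Normalize`, and `allow_empty_key: false` means every input must name a branch. A minimal usage sketch, assuming the repository is loaded as a sentence-transformers model (the path and example texts below are placeholders):

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("path/to/this/checkout")  # placeholder path to this repo

# With an Asym module, each input is a dict whose key selects the branch defined in
# 2_Asym/config.json; plain strings are rejected because allow_empty_key is false.
dialog_embeddings = model.encode([{"dialog": "User: how do I reset my password?"}])
fact_embeddings = model.encode([{"fact": "Passwords are reset from the account settings page."}])

print(dialog_embeddings.shape, fact_embeddings.shape)
```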
README.md CHANGED
@@ -1,3 +1,98 @@
  ---
- license: mit
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+
  ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 2048 dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 3633 with parameters:
+ ```
+ {'batch_size': 1024, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.OnlineContrastiveLoss.OnlineContrastiveLoss`
+
+ Parameters of the fit()-Method:
+ ```
+ {
+ "epochs": 6,
+ "evaluation_steps": 2000,
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
+ "max_grad_norm": 1,
+ "optimizer_class": "<class 'lion_pytorch.lion_pytorch.Lion'>",
+ "optimizer_params": {
+ "lr": 0.0001,
+ "weight_decay": 0.01
+ },
+ "scheduler": "WarmupCosine",
+ "steps_per_epoch": null,
+ "warmup_steps": 100,
+ "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+ (0): Transformer({'max_seq_length': 512, 'do_lower_case': True}) with Transformer model: BertModel
+ (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ (2): Asym(
+ (dialog-0): Dense({'in_features': 1024, 'out_features': 2048, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (dialog-1): Dense({'in_features': 2048, 'out_features': 2048, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (dialog-2): Dense({'in_features': 2048, 'out_features': 1024, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (dialog-3): Normalize()
+ (fact-0): Dense({'in_features': 1024, 'out_features': 2048, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (fact-1): Dense({'in_features': 2048, 'out_features': 2048, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (fact-2): Dense({'in_features': 2048, 'out_features': 1024, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ (fact-3): Normalize()
+ )
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
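
For reference, a sketch of the `fit()` call these README parameters describe. The training data, evaluator inputs, and paths are placeholders, and the keyed `InputExample` format is an assumption based on the Asym setup above; treat this as a reconstruction, not the author's script:

```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses
from lion_pytorch import Lion  # optimizer_class listed in the README parameters

model = SentenceTransformer("path/to/this/checkout")  # placeholder path

# Placeholder pairs; the real training set is not part of this commit.
train_examples = [
    InputExample(texts=[{"dialog": "hi there"}, {"fact": "a greeting"}], label=1),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=1024)
train_loss = losses.OnlineContrastiveLoss(model)

model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=6,
    warmup_steps=100,
    scheduler="warmupcosine",
    optimizer_class=Lion,
    optimizer_params={"lr": 1e-4, "weight_decay": 0.01},
    weight_decay=0.01,
    max_grad_norm=1,
    evaluation_steps=2000,
)
```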
added_tokens.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "[CLS]": 101,
+ "[MASK]": 103,
+ "[PAD]": 0,
+ "[SEP]": 102,
+ "[UNK]": 100
+ }
config.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "_name_or_path": "/root/.cache/torch/sentence_transformers/BAAI_bge-large-en-v1.5/",
+ "architectures": [
+ "BertModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "LABEL_0"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "label2id": {
+ "LABEL_0": 0
+ },
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "pad_token_id": 0,
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.34.0",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 30522
+ }
config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "__version__": {
+ "sentence_transformers": "2.2.2",
+ "transformers": "4.28.1",
+ "pytorch": "1.13.0+cu117"
+ }
+ }
eval/.ipynb_checkpoints/similarity_evaluation_results-checkpoint.csv ADDED
@@ -0,0 +1,2 @@
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+ 0,200,0.09538986217734022,0.08384386662398645,0.09539623750199215,0.08382535432287805,0.036019534063834034,0.030707494752119608,0.09538948135965447,0.08383444891529954
eval/similarity_evaluation_results.csv ADDED
@@ -0,0 +1,11 @@
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+ 0,200,0.09538986217734022,0.08384386662398645,0.09539623750199215,0.08382535432287805,0.036019534063834034,0.030707494752119608,0.09538948135965447,0.08383444891529954
+ 0,400,0.4715094507136195,0.5037850362310335,0.47538241497043077,0.5037851242199534,0.4938051360078945,0.5060080389239441,0.47150933344974283,0.5037850314478904
+ 0,2000,0.7228417329018256,0.7045284462973851,0.7416009762958347,0.7045286446661528,0.7294892059102114,0.7034054227606624,0.7228416863848557,0.704528494298767
+ 0,-1,0.7387045312329137,0.7114060466653345,0.7576018994493214,0.711405910689208,0.7441002517102357,0.7102147572542092,0.7387045555426008,0.7114061507099086
+ 1,2000,0.7445957522164166,0.7144410506640689,0.7632079551550423,0.7144408986857114,0.750044548143612,0.713377520008261,0.7445956500500408,0.7144408602956258
+ 1,-1,0.7403974962810842,0.7117442453449414,0.7592900426897151,0.7117442853401136,0.7462961412416763,0.7104840261620768,0.740397464607492,0.711744155778095
+ 2,2000,0.7412765003718448,0.7115736774299289,0.7595686820436869,0.7115736262673157,0.7469862005999004,0.7105323032569937,0.7412764378172917,0.7115734759134695
+ 2,-1,0.7390358779662405,0.7105991453607753,0.757392918167112,0.7105991038189222,0.7444325444223947,0.7094801004272172,0.7390358427716103,0.7105991229781184
+ 3,2000,0.7393006180821876,0.710289426944981,0.7576480853962786,0.7102894877307891,0.74440332435888,0.7092671328929592,0.7393005765997176,0.7102895181099055
+ 3,-1,0.7369147194033147,0.7091520824844612,0.7555130843201373,0.709152216865955,0.7421882753435206,0.7080979679445787,0.7369146918063216,0.7091522664624613
modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ },
+ {
+ "idx": 2,
+ "name": "2",
+ "path": "2_Asym",
+ "type": "sentence_transformers.models.Asym"
+ }
+ ]
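
`modules.json` is the recipe sentence-transformers uses to reassemble the pipeline at load time: index 0 is the Transformer (repository root), 1 the CLS pooling layer, 2 the Asym block. A quick way to confirm the assembled order, with a placeholder path:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("path/to/this/checkout")  # placeholder path

# A SentenceTransformer is a torch Sequential, so iterating it mirrors modules.json.
for idx, module in enumerate(model):
    print(idx, type(module).__name__)  # Transformer, Pooling, Asym
```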
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b4674c2135c4571743ae1c8ac2c8aa857cfdbd69eb18ecc2f39b57063463452
+ size 1340699814
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "max_seq_length": 512,
+ "do_lower_case": true
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
+ {
+ "added_tokens_decoder": {
+ "0": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "100": {
+ "content": "[UNK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "101": {
+ "content": "[CLS]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "102": {
+ "content": "[SEP]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "103": {
+ "content": "[MASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [],
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "[CLS]",
+ "do_basic_tokenize": true,
+ "do_lower_case": true,
+ "mask_token": "[MASK]",
+ "model_max_length": 512,
+ "never_split": null,
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "strip_accents": null,
+ "tokenize_chinese_chars": true,
+ "tokenizer_class": "BertTokenizer",
+ "unk_token": "[UNK]"
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff