Upload folder using huggingface_hub

Files changed:
- config.json +2 -2
- model.safetensors +2 -2
- tokenizer_config.json +7 -0
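The commit title is the stock message huggingface_hub writes when a folder is pushed with `upload_folder` and no custom `commit_message` is given. A minimal sketch of the kind of call that produces such a commit; the local folder path is hypothetical, and the repo id is taken from the config.json diff below.

```python
from huggingface_hub import HfApi

api = HfApi()  # assumes you are already authenticated (e.g. via `huggingface-cli login`)
api.upload_folder(
    folder_path="./semi-supervised-hatebert",   # hypothetical local checkpoint directory
    repo_id="RewyB/semi-supervised-hatebert",
    repo_type="model",
)
```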
config.json CHANGED
@@ -1,8 +1,8 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "RewyB/semi-supervised-hatebert",
   "_num_labels": 2,
   "architectures": [
-    "
+    "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
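The updated config records the repo id in `_name_or_path`, declares `BertForSequenceClassification` as the architecture, and keeps `_num_labels` at 2, so the checkpoint loads as a two-label sequence classifier. A minimal loading sketch based on those fields (usage illustration only, not taken from the repo's model card):

```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# config.json declares "BertForSequenceClassification" with 2 labels,
# so the Auto* classes resolve to a BERT encoder with a 2-way classification head.
model_id = "RewyB/semi-supervised-hatebert"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

inputs = tokenizer("example text", return_tensors="pt")
logits = model(**inputs).logits  # shape: (1, 2)
```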
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d3a59035b396442aae447870c1595fd185867bc7cc00ab44e716e3f45736be98
+size 437977080
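Because the weights are stored via Git LFS, the diff only touches the pointer file: the new `oid` is the SHA-256 of the uploaded weights and `size` is their byte count (437,977,080 bytes, roughly 418 MiB). A sketch of checking a downloaded copy against this pointer:

```python
import hashlib
import os

from huggingface_hub import hf_hub_download

# Values taken from the updated LFS pointer above.
EXPECTED_SHA256 = "d3a59035b396442aae447870c1595fd185867bc7cc00ab44e716e3f45736be98"
EXPECTED_SIZE = 437977080  # bytes

path = hf_hub_download(repo_id="RewyB/semi-supervised-hatebert",
                       filename="model.safetensors")

# Compare the on-disk size with the pointer's declared size.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch with LFS pointer"

# Hash the file in 1 MiB chunks and compare with the pointer's oid.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_SHA256, "checksum mismatch with LFS pointer"
```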
tokenizer_config.json CHANGED
@@ -96,12 +96,19 @@
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
   "max_len": 512,
+  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
+  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sep_token": "[SEP]",
+  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
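The seven added keys persist the tokenizer's padding and truncation defaults (right-side padding and truncation, `longest_first` strategy, `max_length` 512, stride 0) so they survive a `save_pretrained` / `from_pretrained` round trip. A hedged sketch of how those stored values correspond to the usual call-time arguments; the input strings are placeholders:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("RewyB/semi-supervised-hatebert")

# These call-time arguments mirror the values now stored in tokenizer_config.json:
# padding_side="right", truncation_side="right",
# truncation_strategy="longest_first", max_length=512, stride=0.
batch = tok(
    ["first example", "a somewhat longer second example"],
    padding=True,                 # pad the batch on the right, up to its longest sequence
    truncation="longest_first",   # drop tokens from the longest sequence first
    max_length=512,
    return_tensors="pt",
)
```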