Safetensors · modernbert

Commit 7c30eab (verified) · Parent: a1823af
YYama0 committed

Upload 11 files
config.json ADDED
@@ -0,0 +1,88 @@
+ {
+ "_name_or_path": "sbintuitions/modernbert-ja-130m",
+ "architectures": [
+ "ModernBertForSequenceClassification"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_activation": "gelu",
+ "classifier_bias": false,
+ "classifier_dropout": 0.0,
+ "classifier_pooling": "cls",
+ "cls_token_id": 6,
+ "decoder_bias": true,
+ "deterministic_flash_attn": false,
+ "embedding_dropout": 0.0,
+ "eos_token_id": 2,
+ "global_attn_every_n_layers": 3,
+ "global_rope_theta": 160000.0,
+ "gradient_checkpointing": false,
+ "hidden_activation": "gelu",
+ "hidden_size": 512,
+ "id2label": {
+ "0": "Medical material",
+ "1": "Arterial wall calcification",
+ "2": "Cardiomegaly",
+ "3": "Pericardial effusion",
+ "4": "Coronary artery wall calcification",
+ "5": "Hiatal hernia",
+ "6": "Lymphadenopathy",
+ "7": "Emphysema",
+ "8": "Atelectasis",
+ "9": "Lung nodule",
+ "10": "Lung opacity",
+ "11": "Pulmonary fibrotic sequela",
+ "12": "Pleural effusion",
+ "13": "Mosaic attenuation pattern",
+ "14": "Peribronchial thickening",
+ "15": "Consolidation",
+ "16": "Bronchiectasis",
+ "17": "Interlobular septal thickening"
+ },
+ "initializer_cutoff_factor": 2.0,
+ "initializer_range": 0.02,
+ "intermediate_size": 2048,
+ "label2id": {
+ "Arterial wall calcification": 1,
+ "Atelectasis": 8,
+ "Bronchiectasis": 16,
+ "Cardiomegaly": 2,
+ "Consolidation": 15,
+ "Coronary artery wall calcification": 4,
+ "Emphysema": 7,
+ "Hiatal hernia": 5,
+ "Interlobular septal thickening": 17,
+ "Lung nodule": 9,
+ "Lung opacity": 10,
+ "Lymphadenopathy": 6,
+ "Medical material": 0,
+ "Mosaic attenuation pattern": 13,
+ "Peribronchial thickening": 14,
+ "Pericardial effusion": 3,
+ "Pleural effusion": 12,
+ "Pulmonary fibrotic sequela": 11
+ },
+ "layer_norm_eps": 1e-05,
+ "local_attention": 128,
+ "local_rope_theta": 10000.0,
+ "max_position_embeddings": 8192,
+ "mlp_bias": false,
+ "mlp_dropout": 0.0,
+ "model_type": "modernbert",
+ "norm_bias": false,
+ "norm_eps": 1e-05,
+ "num_attention_heads": 8,
+ "num_hidden_layers": 19,
+ "pad_token_id": 3,
+ "position_embedding_type": "rope",
+ "problem_type": "multi_label_classification",
+ "reference_compile": false,
+ "repad_logits_with_grad": false,
+ "sep_token_id": 4,
+ "sparse_pred_ignore_index": -100,
+ "sparse_prediction": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.48.3",
+ "vocab_size": 102400
+ }
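config.json sets up an 18-label `ModernBertForSequenceClassification` head with `problem_type` set to `multi_label_classification`, so each label is scored independently through a sigmoid. Below is a minimal inference sketch; the `model_path` is a placeholder (use the actual Hub repo id or a local checkpoint directory), and the 0.5 threshold is an arbitrary choice, not the author's.

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_path = "path/to/this-checkpoint"  # placeholder: Hub repo id or local directory
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)
model.eval()

# Example Japanese CT report sentence (illustrative only).
text = "両肺に小結節を認める。心拡大と胸水を伴う。"
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=8192)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 18)

probs = torch.sigmoid(logits)[0]
threshold = 0.5  # arbitrary cut-off; tune on validation data
for idx, p in enumerate(probs.tolist()):
    if p >= threshold:
        print(f"{model.config.id2label[idx]}: {p:.3f}")
```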
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f14e6a4114ef65ee7ae3a2fc5b3cee265997c9e69a43a9442dd42224acb8502c
+ size 529662048
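The entry above is a Git LFS pointer rather than the weights themselves; the real `model.safetensors` is fetched on clone or download. To confirm a downloaded copy matches this commit, its SHA-256 and byte size should equal the `oid` and `size` in the pointer. A small verification sketch; the local path is an assumption.

```python
import hashlib
from pathlib import Path

path = Path("model.safetensors")  # assumed local path to the downloaded weights
expected_oid = "f14e6a4114ef65ee7ae3a2fc5b3cee265997c9e69a43a9442dd42224acb8502c"
expected_size = 529662048

digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == expected_size, "size does not match the LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("model.safetensors matches its LFS pointer")
```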
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6e9925d3a5f0c66332913c8704ff445cddbe8b388065c3a5fdeb85e1fe5a951
+ size 1059400570
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9899ccda7f0d8d9511991180b93aab508ce6e8489de708c88ad1188e7e1d90d6
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e0a9fa61a05680362b84180a4051612f097fdec16acbb1748ba3b37d829e979
+ size 1064
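optimizer.pt, scheduler.pt, and rng_state.pth (together with trainer_state.json and training_args.bin further down) are the `transformers.Trainer` checkpoint state that lets training resume from this exact step, e.g. via `trainer.train(resume_from_checkpoint=...)`. They are plain PyTorch pickles and can also be inspected directly. A sketch, assuming local copies and a recent PyTorch where `torch.load` accepts `weights_only`; it is set to `False` because these are arbitrary pickled objects, so only do this for files you trust.

```python
import torch

# All three are pickled Python objects, hence weights_only=False (trusted files only).
optimizer_state = torch.load("optimizer.pt", map_location="cpu", weights_only=False)
scheduler_state = torch.load("scheduler.pt", map_location="cpu", weights_only=False)
rng_state = torch.load("rng_state.pth", map_location="cpu", weights_only=False)

print(optimizer_state.keys())   # typically dict_keys(['state', 'param_groups'])
print(scheduler_state)          # LR scheduler state dict (step count, base LRs, ...)
print(list(rng_state.keys()))   # python / numpy / cpu (and cuda) RNG snapshots
```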
special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "<cls>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "<mask>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "<sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
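special_tokens_map.json only names the special tokens; their integer ids come from the vocabulary and have to line up with the ids hard-coded in config.json (`bos_token_id=1`, `eos_token_id=2`, `pad_token_id=3`, `sep_token_id=4`, `cls_token_id=6`). A small consistency check, assuming the checkpoint is reachable under the placeholder `model_path`.

```python
from transformers import AutoConfig, AutoTokenizer

model_path = "path/to/this-checkpoint"  # placeholder: Hub repo id or local directory
config = AutoConfig.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Each special token declared here should resolve to the id used in config.json.
assert tokenizer.bos_token_id == config.bos_token_id == 1
assert tokenizer.eos_token_id == config.eos_token_id == 2
assert tokenizer.pad_token_id == config.pad_token_id == 3
assert tokenizer.sep_token_id == config.sep_token_id == 4
assert tokenizer.cls_token_id == config.cls_token_id == 6
print("special token ids are consistent between tokenizer and config")
```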
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:008293028e1a9d9a1038d9b63d989a2319797dfeaa03f171093a57b33a3a8277
+ size 1831879
tokenizer_config.json ADDED
@@ -0,0 +1,171 @@
+ {
+ "add_bos_token": true,
+ "add_dummy_prefix_space": false,
+ "add_eos_token": true,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "<sep>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "5": {
+ "content": "<mask>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "6": {
+ "content": "<cls>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "7": {
+ "content": "<|system|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "8": {
+ "content": "<|assistant|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "9": {
+ "content": "<|user|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "10": {
+ "content": "<|available_tools|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "11": {
+ "content": "<|tool_calls|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "12": {
+ "content": "<|tool_results|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "13": {
+ "content": "<|code|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "14": {
+ "content": "<|file|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "102397": {
+ "content": "<|prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "102398": {
+ "content": "<|suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "102399": {
+ "content": "<|middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "cls_token": "<cls>",
+ "do_lower_case": false,
+ "eos_token": "</s>",
+ "extra_ids": 0,
+ "extra_special_tokens": {},
+ "keep_accents": true,
+ "legacy": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "<pad>",
+ "padding_side": "right",
+ "sep_token": "<sep>",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+ }
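The tokenizer is declared as a SentencePiece `LlamaTokenizer` with `add_bos_token` and `add_eos_token` both true, so plain encoding should wrap the input in `<s>` ... `</s>` (ids 1 and 2), matching `bos_token_id`/`eos_token_id` in config.json. A quick check sketch; `model_path` is a placeholder and the expected ids are an assumption based on these settings.

```python
from transformers import AutoTokenizer

model_path = "path/to/this-checkpoint"  # placeholder: Hub repo id or local directory
tokenizer = AutoTokenizer.from_pretrained(model_path)

ids = tokenizer("胸水貯留を認める。")["input_ids"]
print(ids[0], ids[-1])                       # expected: 1 2 (<s> and </s>)
print(tokenizer.convert_ids_to_tokens(ids))  # inspect the actual token pieces
```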
trainer_state.json ADDED
@@ -0,0 +1,207 @@
+ {
+ "best_metric": 0.06150464341044426,
+ "best_model_checkpoint": "/content/drive/MyDrive/dataset_for_research/ct_rate/data/ct_rate_jpn/model_output/sbintuitions_modernbert-ja-130m/checkpoint-2278",
+ "epoch": 4.0,
+ "eval_steps": 500,
+ "global_step": 9112,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.21949078138718173,
+ "grad_norm": 7.839651584625244,
+ "learning_rate": 1.8902546093064093e-05,
+ "loss": 0.2808,
+ "step": 500
+ },
+ {
+ "epoch": 0.43898156277436345,
+ "grad_norm": 3.2797462940216064,
+ "learning_rate": 1.7805092186128183e-05,
+ "loss": 0.0933,
+ "step": 1000
+ },
+ {
+ "epoch": 0.6584723441615452,
+ "grad_norm": 2.731287956237793,
+ "learning_rate": 1.6707638279192274e-05,
+ "loss": 0.0792,
+ "step": 1500
+ },
+ {
+ "epoch": 0.8779631255487269,
+ "grad_norm": 3.9177889823913574,
+ "learning_rate": 1.561018437225637e-05,
+ "loss": 0.0695,
+ "step": 2000
+ },
+ {
+ "epoch": 1.0,
+ "eval_accuracy": 0.9815139986342796,
+ "eval_f1": 0.9535225948862591,
+ "eval_loss": 0.06150464341044426,
+ "eval_precision": 0.9613625123639961,
+ "eval_recall": 0.9458095122247901,
+ "eval_runtime": 32.5019,
+ "eval_samples_per_second": 140.176,
+ "eval_steps_per_second": 17.537,
+ "step": 2278
+ },
+ {
+ "epoch": 1.0974539069359086,
+ "grad_norm": 2.4581689834594727,
+ "learning_rate": 1.4512730465320458e-05,
+ "loss": 0.0588,
+ "step": 2500
+ },
+ {
+ "epoch": 1.3169446883230904,
+ "grad_norm": 2.0007755756378174,
+ "learning_rate": 1.3415276558384549e-05,
+ "loss": 0.0554,
+ "step": 3000
+ },
+ {
+ "epoch": 1.536435469710272,
+ "grad_norm": 0.9888765811920166,
+ "learning_rate": 1.2317822651448641e-05,
+ "loss": 0.0508,
+ "step": 3500
+ },
+ {
+ "epoch": 1.755926251097454,
+ "grad_norm": 3.2408642768859863,
+ "learning_rate": 1.122036874451273e-05,
+ "loss": 0.0502,
+ "step": 4000
+ },
+ {
+ "epoch": 1.9754170324846356,
+ "grad_norm": 2.510002613067627,
+ "learning_rate": 1.0122914837576823e-05,
+ "loss": 0.0501,
+ "step": 4500
+ },
+ {
+ "epoch": 2.0,
+ "eval_accuracy": 0.9830382401716906,
+ "eval_f1": 0.9576650333262319,
+ "eval_loss": 0.06173006817698479,
+ "eval_precision": 0.9584526347852574,
+ "eval_recall": 0.9568787252159104,
+ "eval_runtime": 32.5239,
+ "eval_samples_per_second": 140.082,
+ "eval_steps_per_second": 17.526,
+ "step": 4556
+ },
+ {
+ "epoch": 2.194907813871817,
+ "grad_norm": 5.879360675811768,
+ "learning_rate": 9.025460930640914e-06,
+ "loss": 0.0346,
+ "step": 5000
+ },
+ {
+ "epoch": 2.4143985952589992,
+ "grad_norm": 0.7706089615821838,
+ "learning_rate": 7.928007023705005e-06,
+ "loss": 0.0349,
+ "step": 5500
+ },
+ {
+ "epoch": 2.633889376646181,
+ "grad_norm": 4.287876129150391,
+ "learning_rate": 6.830553116769097e-06,
+ "loss": 0.0321,
+ "step": 6000
+ },
+ {
+ "epoch": 2.853380158033363,
+ "grad_norm": 2.919949531555176,
+ "learning_rate": 5.7330992098331876e-06,
+ "loss": 0.0315,
+ "step": 6500
+ },
+ {
+ "epoch": 3.0,
+ "eval_accuracy": 0.9846478392351966,
+ "eval_f1": 0.9614760870230409,
+ "eval_loss": 0.06240718811750412,
+ "eval_precision": 0.9674856826159246,
+ "eval_recall": 0.9555406884807202,
+ "eval_runtime": 32.6149,
+ "eval_samples_per_second": 139.691,
+ "eval_steps_per_second": 17.477,
+ "step": 6834
+ },
+ {
+ "epoch": 3.0728709394205445,
+ "grad_norm": 0.055859215557575226,
+ "learning_rate": 4.6356453028972785e-06,
+ "loss": 0.0263,
+ "step": 7000
+ },
+ {
+ "epoch": 3.292361720807726,
+ "grad_norm": 0.41953563690185547,
+ "learning_rate": 3.53819139596137e-06,
+ "loss": 0.0149,
+ "step": 7500
+ },
+ {
+ "epoch": 3.511852502194908,
+ "grad_norm": 0.9124375581741333,
+ "learning_rate": 2.440737489025461e-06,
+ "loss": 0.0161,
+ "step": 8000
+ },
+ {
+ "epoch": 3.7313432835820897,
+ "grad_norm": 1.055453896522522,
+ "learning_rate": 1.3432835820895524e-06,
+ "loss": 0.014,
+ "step": 8500
+ },
+ {
+ "epoch": 3.9508340649692713,
+ "grad_norm": 5.611504077911377,
+ "learning_rate": 2.458296751536436e-07,
+ "loss": 0.013,
+ "step": 9000
+ },
+ {
+ "epoch": 4.0,
+ "eval_accuracy": 0.9850014632718759,
+ "eval_f1": 0.9624152050357514,
+ "eval_loss": 0.08579593896865845,
+ "eval_precision": 0.967084254482928,
+ "eval_recall": 0.9577910229899039,
+ "eval_runtime": 32.6214,
+ "eval_samples_per_second": 139.663,
+ "eval_steps_per_second": 17.473,
+ "step": 9112
+ }
+ ],
+ "logging_steps": 500,
+ "max_steps": 9112,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 4,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.3278248486495112e+16,
+ "train_batch_size": 8,
+ "trial_name": null,
+ "trial_params": null
+ }
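trainer_state.json records the full training log above: the best eval_loss (0.0615) is reached at step 2278, the end of epoch 1, and is what `best_model_checkpoint` points at, while eval_f1 keeps improving slightly through epoch 4. A short sketch that tabulates the per-epoch evaluation entries from a local copy of the file.

```python
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the log entries that carry evaluation metrics (one per epoch here).
print(f"{'epoch':>5} {'eval_loss':>10} {'f1':>8} {'precision':>10} {'recall':>8}")
for entry in state["log_history"]:
    if "eval_loss" in entry:
        print(f"{entry['epoch']:>5.1f} {entry['eval_loss']:>10.4f} "
              f"{entry['eval_f1']:>8.4f} {entry['eval_precision']:>10.4f} "
              f"{entry['eval_recall']:>8.4f}")
```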
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7e4589492eac21b18ffe7b7d5094895035279e9621f3ee6ee7685e138d796e0
+ size 5496
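training_args.bin is a pickled `transformers.TrainingArguments` object holding the hyperparameters used for this run. It can be inspected with `torch.load`; as with the optimizer and scheduler files, it is an arbitrary pickle, so pass `weights_only=False` only for files you trust. A minimal sketch assuming a local copy and a transformers version compatible with the one that wrote it (4.48.3 per config.json).

```python
import torch

args = torch.load("training_args.bin", map_location="cpu", weights_only=False)
print(type(args).__name__)  # expected: TrainingArguments
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```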