Commit ff11d21 (verified) by qgallouedec (HF Staff)
Parent(s): 6641f65

Upload Qwen3ForSequenceClassification

Files changed (2):
  1. config.json +44 -8
  2. model.safetensors +2 -2
config.json CHANGED
@@ -4,10 +4,12 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "dtype": "float32",
+  "bos_token_id": 151643,
+  "dtype": "bfloat16",
+  "eos_token_id": 151645,
   "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size": 8,
+  "hidden_size": 16,
   "id2label": {
     "0": "LABEL_0"
   },
@@ -17,22 +19,56 @@
     "LABEL_0": 0
   },
   "layer_types": [
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
+    "full_attention",
     "full_attention",
     "full_attention"
   ],
-  "max_position_embeddings": 32768,
-  "max_window_layers": 28,
+  "max_position_embeddings": 40960,
+  "max_window_layers": 36,
   "model_type": "qwen3",
   "num_attention_heads": 4,
   "num_hidden_layers": 2,
   "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
-  "rope_theta": 10000.0,
+  "rope_theta": 1000000,
   "sliding_window": null,
-  "tie_word_embeddings": false,
-  "transformers_version": "4.57.0.dev0",
+  "tie_word_embeddings": true,
+  "transformers_version": "4.56.2",
   "use_cache": true,
   "use_sliding_window": false,
-  "vocab_size": 151669
+  "vocab_size": 151936
   }
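
For context, the updated config describes a tiny two-layer Qwen3 sequence-classification model (hidden_size 16, 4 attention heads, a single LABEL_0 label). A minimal loading sketch with transformers follows; the repo id is a placeholder assumption, since the commit page does not name the repository.

# Minimal sketch, assuming a hypothetical repo id: load the tiny Qwen3
# classifier described by the updated config.json.
from transformers import AutoConfig, AutoModelForSequenceClassification

repo_id = "some-org/tiny-Qwen3ForSequenceClassification"  # placeholder, not from this commit

config = AutoConfig.from_pretrained(repo_id)
assert config.model_type == "qwen3"
assert config.num_hidden_layers == 2   # unchanged in this commit
assert config.hidden_size == 16        # bumped from 8 in this commit

model = AutoModelForSequenceClassification.from_pretrained(repo_id)
print(model.config.id2label)  # {0: 'LABEL_0'} -> a single-label head
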
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:50d12fcc7f50cfd327532643e3877c9a6ea13d142ebfe5780f04c454e4d271a1
-size 4962680
+oid sha256:4c93aade12b3da3cc0cc83191b8bb8020a02bbf1140baee9f07075f6b5c8f216
+size 4970248
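
The model.safetensors entry is a Git LFS pointer, so the diff records only the new blob's sha256 and byte size rather than the weights themselves. A small sketch, assuming the weights file has already been downloaded locally, for checking it against the pointer:

# Sketch: verify a locally downloaded model.safetensors against the new
# LFS pointer (oid and size taken from the diff above).
import hashlib
import os

path = "model.safetensors"  # assumed local path; adjust as needed
expected_oid = "4c93aade12b3da3cc0cc83191b8bb8020a02bbf1140baee9f07075f6b5c8f216"
expected_size = 4970248

assert os.path.getsize(path) == expected_size, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")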