Pisethan committed on
Commit fd304e6 · verified · 1 Parent(s): 835c82d

Upload XLMRobertaForSequenceClassification

Files changed (2)
  1. config.json +10 -10
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "xlm-roberta-base",
+  "_name_or_path": "./model",
   "architectures": [
     "XLMRobertaForSequenceClassification"
   ],
@@ -23,14 +23,14 @@
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "addition": 6,
-    "algebra": 3,
-    "arithmetic": 0,
-    "division": 2,
-    "exponents": 5,
-    "geometry": 4,
-    "multiplication": 1,
-    "subtraction": 7
+    "0": "arithmetic",
+    "1": "multiplication",
+    "2": "division",
+    "3": "algebra",
+    "4": "geometry",
+    "5": "exponents",
+    "6": "addition",
+    "7": "subtraction"
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
@@ -42,7 +42,7 @@
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
   "torch_dtype": "float32",
-  "transformers_version": "4.47.0",
+  "transformers_version": "4.46.3",
   "type_vocab_size": 1,
   "use_cache": true,
   "vocab_size": 250002
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0b892a1da419ea5e2308ca459691451091eb08ff29ec453dc60c71d90f9cda75
+oid sha256:764ee57b10117d6591fc9cfd85991af7b0983b0c2b4921da8a3e396ea5bb9157
 size 1112223464
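The weights file is tracked with Git LFS, so the diff above only changes the pointer (sha256 oid and byte size) of the new binary. A small sketch, assuming model.safetensors has already been downloaded next to the script, of checking the file against that pointer:

```python
# Sketch: verify a downloaded model.safetensors against the Git LFS pointer above.
# The file path is an assumption; oid and size are taken from the new pointer.
import hashlib
import os

expected_oid = "764ee57b10117d6591fc9cfd85991af7b0983b0c2b4921da8a3e396ea5bb9157"
expected_size = 1112223464
path = "model.safetensors"  # assumed location of the downloaded weights

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert sha.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("model.safetensors matches the LFS pointer")
```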