aapoliakova/bsf_cls

Commit 199dd49 (verified) · 1 Parent(s): e4ced52 · committed by aapoliakova

README.md CHANGED
@@ -1,7 +1,5 @@
 ---
 library_name: transformers
-license: mit
-base_model: microsoft/deberta-v3-small
 tags:
 - generated_from_trainer
 model-index:
@@ -14,15 +12,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 # cls_level_bsf
 
-This model is a fine-tuned version of [microsoft/deberta-v3-small](https://huggingface.co/microsoft/deberta-v3-small) on an unknown dataset.
-It achieves the following results on the evaluation set:
-- eval_loss: 1.5422
-- eval_accuracy: 0.4860
-- eval_runtime: 7.168
-- eval_samples_per_second: 39.899
-- eval_steps_per_second: 2.511
-- epoch: 3.2308
-- step: 1050
+This model was trained from scratch on an unknown dataset.
 
 ## Model description
 
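
The removed model-card block documented the microsoft/deberta-v3-small fine-tune and its evaluation metrics (eval_accuracy 0.4860 at step 1050). A minimal sketch of running the published classifier, assuming the repo id aapoliakova/bsf_cls from the commit header and whatever label set the classification head was trained with:

```python
from transformers import pipeline

# Minimal sketch, not part of the model card: the repo id is taken from the
# commit header; the labels returned depend on how the head was trained.
clf = pipeline("text-classification", model="aapoliakova/bsf_cls")
print(clf("An example sentence to classify."))
```
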
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "microsoft/deberta-v3-small",
+  "_name_or_path": "/content/cls_level_bsf/checkpoint-300",
   "architectures": [
     "DebertaV2ForSequenceClassification"
   ],
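
The only change to config.json is `_name_or_path`, which transformers sets to whatever path or hub id the weights were last loaded from. A sketch of how a local Trainer checkpoint path ends up there (the checkpoint path is the one in the diff; the output directory is illustrative):

```python
from transformers import AutoModelForSequenceClassification

# Sketch: loading from a local Trainer checkpoint and re-saving writes that
# local path into config.json as "_name_or_path".
model = AutoModelForSequenceClassification.from_pretrained(
    "/content/cls_level_bsf/checkpoint-300"
)
model.save_pretrained("cls_level_bsf")  # config.json now records the checkpoint path
```
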
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:05bce8b50732d5120a22c782d4131cade8617527addad6690966cd68097d9a63
+oid sha256:90dbba4ab23da6829b5f1f22e15e32cfeeb0fd4cc0ee51fce5fb9e22c0ea4112
 size 567601628
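
Only the Git LFS pointer changes here: same file size, new content hash. If you want to confirm that a downloaded model.safetensors matches the pointer, a quick sketch using the oid from the new pointer:

```python
import hashlib

# Sketch: the LFS pointer's oid is the SHA-256 of the actual weights file.
EXPECTED = "90dbba4ab23da6829b5f1f22e15e32cfeeb0fd4cc0ee51fce5fb9e22c0ea4112"

digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
print(digest.hexdigest() == EXPECTED)
```
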
runs/Oct01_16-37-55_36a7c0a38235/events.out.tfevents.1727800683.36a7c0a38235.1511.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50eef6419a42ab65b9612683f844aee1182b9c5eee4862af320739295754819b
+size 409
special_tokens_map.json CHANGED
@@ -1,10 +1,46 @@
 {
-  "bos_token": "[CLS]",
-  "cls_token": "[CLS]",
-  "eos_token": "[SEP]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
+  "bos_token": {
+    "content": "[CLS]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "cls_token": {
+    "content": "[CLS]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "[SEP]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "mask_token": {
+    "content": "[MASK]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "[PAD]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "sep_token": {
+    "content": "[SEP]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "[UNK]",
     "lstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -47,12 +47,19 @@
   "do_lower_case": false,
   "eos_token": "[SEP]",
   "mask_token": "[MASK]",
+  "max_length": 512,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sep_token": "[SEP]",
   "sp_model_kwargs": {},
   "split_by_punct": false,
+  "stride": 0,
   "tokenizer_class": "DebertaV2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]",
   "vocab_type": "spm"
 }
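
The added keys record the padding and truncation defaults the tokenizer was saved with (max_length 512, right-side padding and truncation, longest_first truncation, stride 0). A sketch of the equivalent call-time arguments, assuming the tokenizer is loaded from this repo:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("aapoliakova/bsf_cls")

# Sketch: these kwargs correspond to the new keys in tokenizer_config.json.
enc = tok(
    "An example input.",
    truncation=True,        # truncation_strategy: longest_first, truncation_side: right
    max_length=512,         # max_length
    padding="max_length",   # pad_token [PAD], padding_side: right
    return_tensors="pt",
)
print(enc["input_ids"].shape)
```
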
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1a685d7ab4afbd2ef2ba2de5c90952fb016438bf574c829139c844bfa18dff28
+oid sha256:d7a9a2c81bfb516c6ca8ebe7b6e22456e6a784b3ddb8b01d3d1257bc4554dd6e
 size 5176
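
training_args.bin is the pickled TrainingArguments object the Trainer saves alongside checkpoints; only its hash changes in this commit. A sketch for inspecting it locally (weights_only=False is needed on recent PyTorch because the file is a pickle, not a tensor archive):

```python
import torch

# Sketch: inspect the hyperparameters stored in training_args.bin.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```
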