End of training
- README.md +26 -26
- logs/events.out.tfevents.1741096961.DESKTOP-HA84SVN.3577307.1 +2 -2
- model.safetensors +1 -1
- tokenizer.json +2 -16
- tokenizer_config.json +7 -0
README.md
CHANGED
@@ -1,7 +1,7 @@
 ---
 library_name: transformers
 license: mit
-base_model:
+base_model: pabloma09/layoutlm-funsd
 tags:
 - generated_from_trainer
 model-index:
@@ -14,16 +14,16 @@ should probably proofread and complete it, then remove this comment. -->

 # layoutlm-funsd

-This model is a fine-tuned version of [
+This model is a fine-tuned version of [pabloma09/layoutlm-funsd](https://huggingface.co/pabloma09/layoutlm-funsd) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Eader: {'precision': 0.
-- Nswer: {'precision': 0.
-- Uestion: {'precision': 0.
-- Overall Precision: 0.
+- Loss: 0.9937
+- Eader: {'precision': 0.35, 'recall': 0.21875, 'f1': 0.2692307692307692, 'number': 32}
+- Nswer: {'precision': 0.5135135135135135, 'recall': 0.5428571428571428, 'f1': 0.5277777777777778, 'number': 70}
+- Uestion: {'precision': 0.4931506849315068, 'recall': 0.46153846153846156, 'f1': 0.4768211920529801, 'number': 78}
+- Overall Precision: 0.4850
 - Overall Recall: 0.45
-- Overall F1: 0.
-- Overall Accuracy: 0.
+- Overall F1: 0.4669
+- Overall Accuracy: 0.8029

 ## Model description

@@ -53,23 +53,23 @@ The following hyperparameters were used during training:

 ### Training results

-| Training Loss | Epoch | Step | Validation Loss | Eader
-|
-|
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
+| Training Loss | Epoch | Step | Validation Loss | Eader | Nswer | Uestion | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+|:-------------:|:-----:|:----:|:---------------:|:---------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
+| 0.1124 | 1.0 | 13 | 0.8542 | {'precision': 0.32, 'recall': 0.25, 'f1': 0.2807017543859649, 'number': 32} | {'precision': 0.4647887323943662, 'recall': 0.4714285714285714, 'f1': 0.46808510638297873, 'number': 70} | {'precision': 0.4861111111111111, 'recall': 0.44871794871794873, 'f1': 0.4666666666666667, 'number': 78} | 0.4524 | 0.4222 | 0.4368 | 0.7848 |
+| 0.1002 | 2.0 | 26 | 0.8579 | {'precision': 0.3181818181818182, 'recall': 0.21875, 'f1': 0.25925925925925924, 'number': 32} | {'precision': 0.4146341463414634, 'recall': 0.4857142857142857, 'f1': 0.4473684210526316, 'number': 70} | {'precision': 0.4125, 'recall': 0.4230769230769231, 'f1': 0.4177215189873418, 'number': 78} | 0.4022 | 0.4111 | 0.4066 | 0.7559 |
+| 0.0905 | 3.0 | 39 | 0.7874 | {'precision': 0.34782608695652173, 'recall': 0.25, 'f1': 0.2909090909090909, 'number': 32} | {'precision': 0.45, 'recall': 0.5142857142857142, 'f1': 0.48, 'number': 70} | {'precision': 0.5, 'recall': 0.48717948717948717, 'f1': 0.49350649350649345, 'number': 78} | 0.4581 | 0.4556 | 0.4568 | 0.7987 |
+| 0.0743 | 4.0 | 52 | 0.9167 | {'precision': 0.4, 'recall': 0.1875, 'f1': 0.25531914893617025, 'number': 32} | {'precision': 0.48, 'recall': 0.5142857142857142, 'f1': 0.496551724137931, 'number': 70} | {'precision': 0.5333333333333333, 'recall': 0.5128205128205128, 'f1': 0.5228758169934641, 'number': 78} | 0.4970 | 0.4556 | 0.4754 | 0.7926 |
+| 0.0534 | 5.0 | 65 | 0.9266 | {'precision': 0.45, 'recall': 0.28125, 'f1': 0.34615384615384615, 'number': 32} | {'precision': 0.43373493975903615, 'recall': 0.5142857142857142, 'f1': 0.47058823529411764, 'number': 70} | {'precision': 0.4805194805194805, 'recall': 0.47435897435897434, 'f1': 0.47741935483870973, 'number': 78} | 0.4556 | 0.4556 | 0.4556 | 0.7800 |
+| 0.0465 | 6.0 | 78 | 1.0600 | {'precision': 0.3, 'recall': 0.1875, 'f1': 0.23076923076923075, 'number': 32} | {'precision': 0.4864864864864865, 'recall': 0.5142857142857142, 'f1': 0.5, 'number': 70} | {'precision': 0.4666666666666667, 'recall': 0.44871794871794873, 'f1': 0.45751633986928103, 'number': 78} | 0.4556 | 0.4278 | 0.4413 | 0.7197 |
+| 0.0388 | 7.0 | 91 | 0.9172 | {'precision': 0.4444444444444444, 'recall': 0.25, 'f1': 0.32, 'number': 32} | {'precision': 0.5, 'recall': 0.5571428571428572, 'f1': 0.5270270270270271, 'number': 70} | {'precision': 0.4625, 'recall': 0.47435897435897434, 'f1': 0.46835443037974683, 'number': 78} | 0.4773 | 0.4667 | 0.4719 | 0.7740 |
+| 0.0333 | 8.0 | 104 | 0.9758 | {'precision': 0.3684210526315789, 'recall': 0.21875, 'f1': 0.2745098039215686, 'number': 32} | {'precision': 0.4157303370786517, 'recall': 0.5285714285714286, 'f1': 0.46540880503144655, 'number': 70} | {'precision': 0.4186046511627907, 'recall': 0.46153846153846156, 'f1': 0.4390243902439025, 'number': 78} | 0.4124 | 0.4444 | 0.4278 | 0.7770 |
+| 0.0269 | 9.0 | 117 | 0.9879 | {'precision': 0.2916666666666667, 'recall': 0.21875, 'f1': 0.25, 'number': 32} | {'precision': 0.4, 'recall': 0.5142857142857142, 'f1': 0.45, 'number': 70} | {'precision': 0.3888888888888889, 'recall': 0.44871794871794873, 'f1': 0.41666666666666663, 'number': 78} | 0.3824 | 0.4333 | 0.4062 | 0.7794 |
+| 0.0255 | 10.0 | 130 | 0.9909 | {'precision': 0.3, 'recall': 0.1875, 'f1': 0.23076923076923075, 'number': 32} | {'precision': 0.43902439024390244, 'recall': 0.5142857142857142, 'f1': 0.4736842105263158, 'number': 70} | {'precision': 0.4358974358974359, 'recall': 0.4358974358974359, 'f1': 0.4358974358974359, 'number': 78} | 0.4222 | 0.4222 | 0.4222 | 0.7914 |
+| 0.0217 | 11.0 | 143 | 0.9914 | {'precision': 0.35, 'recall': 0.21875, 'f1': 0.2692307692307692, 'number': 32} | {'precision': 0.5066666666666667, 'recall': 0.5428571428571428, 'f1': 0.5241379310344827, 'number': 70} | {'precision': 0.45121951219512196, 'recall': 0.47435897435897434, 'f1': 0.46249999999999997, 'number': 78} | 0.4633 | 0.4556 | 0.4594 | 0.7951 |
+| 0.024 | 12.0 | 156 | 0.9999 | {'precision': 0.3684210526315789, 'recall': 0.21875, 'f1': 0.2745098039215686, 'number': 32} | {'precision': 0.5064935064935064, 'recall': 0.5571428571428572, 'f1': 0.5306122448979592, 'number': 70} | {'precision': 0.475, 'recall': 0.48717948717948717, 'f1': 0.4810126582278481, 'number': 78} | 0.4773 | 0.4667 | 0.4719 | 0.7896 |
+| 0.0197 | 13.0 | 169 | 0.9820 | {'precision': 0.3333333333333333, 'recall': 0.21875, 'f1': 0.2641509433962264, 'number': 32} | {'precision': 0.49333333333333335, 'recall': 0.5285714285714286, 'f1': 0.5103448275862069, 'number': 70} | {'precision': 0.5205479452054794, 'recall': 0.48717948717948717, 'f1': 0.5033112582781456, 'number': 78} | 0.4852 | 0.4556 | 0.4699 | 0.8053 |
+| 0.022 | 14.0 | 182 | 0.9912 | {'precision': 0.35, 'recall': 0.21875, 'f1': 0.2692307692307692, 'number': 32} | {'precision': 0.5, 'recall': 0.5428571428571428, 'f1': 0.5205479452054795, 'number': 70} | {'precision': 0.4864864864864865, 'recall': 0.46153846153846156, 'f1': 0.47368421052631576, 'number': 78} | 0.4765 | 0.45 | 0.4629 | 0.8023 |
+| 0.0215 | 15.0 | 195 | 0.9937 | {'precision': 0.35, 'recall': 0.21875, 'f1': 0.2692307692307692, 'number': 32} | {'precision': 0.5135135135135135, 'recall': 0.5428571428571428, 'f1': 0.5277777777777778, 'number': 70} | {'precision': 0.4931506849315068, 'recall': 0.46153846153846156, 'f1': 0.4768211920529801, 'number': 78} | 0.4850 | 0.45 | 0.4669 | 0.8029 |


 ### Framework versions
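As a quick sanity check of the card above, the fine-tuned checkpoint can be pulled straight from the Hub. This is a minimal sketch, assuming the `pabloma09/layoutlm-funsd` repo id named in the diff and the standard `transformers` auto classes; the label set printed at the end is whatever was saved in the model config.

```python
# Minimal sketch: load the fine-tuned token-classification checkpoint
# referenced in the README diff above (repo id assumed from the diff).
from transformers import AutoModelForTokenClassification, AutoTokenizer

model_id = "pabloma09/layoutlm-funsd"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The FUNSD entity labels (header/question/answer) live in the saved config.
print(model.config.id2label)
```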
logs/events.out.tfevents.1741096961.DESKTOP-HA84SVN.3577307.1
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e0db03db479a12996a6d6e5ce6bd0d80d4072b29879acca018b07ca91bd2f55f
+size 16203
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c7ef94d4ffdda985b490413d5c9360b9f5c2aaa1a3150b167cd21c54d7f5860e
 size 450548984
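Both binary files are stored as Git LFS pointers: the repository only tracks the `oid sha256` and `size`, while the payload lives in LFS storage. A small sketch for verifying a downloaded copy against the digest recorded above; the local path is an assumption.

```python
# Sketch: recompute the sha256 of a downloaded file and compare it with the
# digest in the LFS pointer. "model.safetensors" is assumed to be in the cwd.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

print(sha256_of("model.safetensors"))
# Expected (from the pointer above):
# c7ef94d4ffdda985b490413d5c9360b9f5c2aaa1a3150b167cd21c54d7f5860e
```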
tokenizer.json
CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 512
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "[PAD]"
-  },
+  "truncation": null,
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
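With `truncation` and `padding` nulled out in `tokenizer.json`, fixed-length encoding is no longer baked into the tokenizer and has to be requested per call. A hedged sketch of what that looks like, assuming the same repo id and made-up word boxes (the LayoutLMv2-style tokenizer declared in the config expects normalized 0-1000 boxes alongside the words).

```python
# Sketch: re-apply truncation/padding at call time now that tokenizer.json
# no longer hard-codes them. Words and boxes below are illustrative only.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("pabloma09/layoutlm-funsd")

words = ["invoice", "number", "12345"]
boxes = [[48, 84, 156, 108], [160, 84, 220, 108], [224, 84, 280, 108]]

enc = tokenizer(
    words,
    boxes=boxes,
    truncation=True,
    padding="max_length",
    max_length=512,
)
print(len(enc["input_ids"]))  # 512 once padding is applied
```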
tokenizer_config.json
CHANGED
@@ -55,9 +55,11 @@
   "do_lower_case": true,
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
+  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
   "only_label_first_subword": true,
+  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
   "pad_token_box": [
     0,
@@ -66,6 +68,8 @@
     0
   ],
   "pad_token_label": -100,
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "processor_class": "LayoutLMv2Processor",
   "sep_token": "[SEP]",
   "sep_token_box": [
@@ -74,8 +78,11 @@
     1000,
     1000
   ],
+  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "LayoutLMv2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
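The keys added to `tokenizer_config.json` (`max_length`, `padding_side`, `stride`, `truncation_side`, `truncation_strategy`, and friends) are the call-time kwargs saved alongside the tokenizer. After loading, most of them surface as plain attributes; a small sketch, assuming the same repo id as above.

```python
# Sketch: the saved kwargs show up as tokenizer attributes after loading.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("pabloma09/layoutlm-funsd")
print(tok.model_max_length)  # 512
print(tok.padding_side)      # "right"
print(tok.truncation_side)   # "right"
```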