2023-09-02 00:42:11,828 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Model: "SequenceTagger(
  (embeddings): TransformerWordEmbeddings(
    (model): ElectraModel(
      (embeddings): ElectraEmbeddings(
        (word_embeddings): Embedding(32001, 768)
        (position_embeddings): Embedding(512, 768)
        (token_type_embeddings): Embedding(2, 768)
        (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
        (dropout): Dropout(p=0.1, inplace=False)
      )
      (encoder): ElectraEncoder(
        (layer): ModuleList(
          (0-11): 12 x ElectraLayer(
            (attention): ElectraAttention(
              (self): ElectraSelfAttention(
                (query): Linear(in_features=768, out_features=768, bias=True)
                (key): Linear(in_features=768, out_features=768, bias=True)
                (value): Linear(in_features=768, out_features=768, bias=True)
                (dropout): Dropout(p=0.1, inplace=False)
              )
              (output): ElectraSelfOutput(
                (dense): Linear(in_features=768, out_features=768, bias=True)
                (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
                (dropout): Dropout(p=0.1, inplace=False)
              )
            )
            (intermediate): ElectraIntermediate(
              (dense): Linear(in_features=768, out_features=3072, bias=True)
              (intermediate_act_fn): GELUActivation()
            )
            (output): ElectraOutput(
              (dense): Linear(in_features=3072, out_features=768, bias=True)
              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
              (dropout): Dropout(p=0.1, inplace=False)
            )
          )
        )
      )
    )
  )
  (locked_dropout): LockedDropout(p=0.5)
  (linear): Linear(in_features=768, out_features=21, bias=True)
  (loss_function): CrossEntropyLoss()
)"
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
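A model with the architecture printed above can be assembled in Flair roughly as in the sketch below. This is a minimal sketch, not the exact training script: the backbone identifier (hmteams/teams-base-historic-multilingual-discriminator), the "-1"-layer/"first"-pooling settings and the SequenceTagger arguments are inferred from the base path and module dump in this log, and the NER_HIPE_2022 loader call is an assumption about how the corpus below was obtained.

from flair.datasets import NER_HIPE_2022
from flair.embeddings import TransformerWordEmbeddings
from flair.models import SequenceTagger

# German HIPE-2020 data as distributed with HIPE-2022 (3575/1235/1266 sentences).
corpus = NER_HIPE_2022(dataset_name="hipe2020", language="de")
label_dict = corpus.make_label_dictionary(label_type="ner")

# ELECTRA-style backbone; last layer only with "first" subtoken pooling,
# matching the "poolingfirst-layers-1" part of the logged base path (assumed).
embeddings = TransformerWordEmbeddings(
    model="hmteams/teams-base-historic-multilingual-discriminator",
    layers="-1",
    subtoken_pooling="first",
    fine_tune=True,
)

# No RNN and no CRF, so the tagger reduces to LockedDropout + a linear head
# (Linear 768 -> 21 in the dump) trained with CrossEntropyLoss.
tagger = SequenceTagger(
    hidden_size=256,  # unused when use_rnn=False
    embeddings=embeddings,
    tag_dictionary=label_dict,
    tag_type="ner",
    use_rnn=False,
    use_crf=False,
    reproject_embeddings=False,
)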
2023-09-02 00:42:11,829 MultiCorpus: 3575 train + 1235 dev + 1266 test sentences
 - NER_HIPE_2022 Corpus: 3575 train + 1235 dev + 1266 test sentences - /home/stefan/.flair/datasets/ner_hipe_2022/v2.1/hipe2020/de/with_doc_seperator
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Train: 3575 sentences
2023-09-02 00:42:11,829 (train_with_dev=False, train_with_test=False)
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Training Params:
2023-09-02 00:42:11,829 - learning_rate: "3e-05"
2023-09-02 00:42:11,829 - mini_batch_size: "8"
2023-09-02 00:42:11,829 - max_epochs: "10"
2023-09-02 00:42:11,829 - shuffle: "True"
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Plugins:
2023-09-02 00:42:11,829 - LinearScheduler | warmup_fraction: '0.1'
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Final evaluation on model from best epoch (best-model.pt)
2023-09-02 00:42:11,829 - metric: "('micro avg', 'f1-score')"
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Computation:
2023-09-02 00:42:11,829 - compute on device: cuda:0
2023-09-02 00:42:11,829 - embedding storage: none
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 Model training base path: "hmbench-hipe2020/de-hmteams/teams-base-historic-multilingual-discriminator-bs8-wsFalse-e10-lr3e-05-poolingfirst-layers-1-crfFalse-1"
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
2023-09-02 00:42:11,829 ----------------------------------------------------------------------------------------------------
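A fine-tuning call consistent with the logged parameters (learning_rate 3e-05, mini_batch_size 8, max_epochs 10, shuffle True, linear warmup over 10% of the steps) would look roughly like the sketch below. It assumes the tagger and corpus objects from the sketch above; in recent Flair versions, fine_tune() attaches a linear learning-rate schedule with a default warmup_fraction of 0.1, which matches the LinearScheduler plugin line in this log.

from flair.trainers import ModelTrainer

trainer = ModelTrainer(tagger, corpus)

# Fine-tune the transformer end to end; the output directory is the
# logged model training base path.
trainer.fine_tune(
    "hmbench-hipe2020/de-hmteams/teams-base-historic-multilingual-discriminator-bs8-wsFalse-e10-lr3e-05-poolingfirst-layers-1-crfFalse-1",
    learning_rate=3e-05,
    mini_batch_size=8,
    max_epochs=10,
    shuffle=True,
)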
|