{
"output_path": "/content/output/",
"train_path": "/content/ArabicNER/ANERCorp/train.txt",
"test_path": "/content/ArabicNER/ANERCorp/test.txt",
"val_path": "/content/ArabicNER/ANERCorp/val.txt",
"seed": 1,
"max_epochs": 50,
"batch_size": 8,
"bert_model": "UBC-NLP/ARBERTv2",
"num_workers": 1,
"gpus": [
0
],
"learning_rate": 1e-05,
"max_seq_len": 512,
"overwrite": "True",
"log_interval": 10,
"network_config": {
"fn": "arabiner.nn.BertSeqTagger",
"kwargs": {
"dropout": 0.1,
"bert_model": "aubmindlab/bert-base-arabertv2",
"num_labels": 9
}
},
"optimizer_config": {
"fn": "torch.optim.Adam",
"kwargs": {
"lr": 1e-05
}
},
"trainer_config": {
"fn": "arabiner.trainers.BertTrainer",
"kwargs": {
"max_epochs": 50
}
},
"data_config": {
"fn": "arabiner.data.datasets.DefaultDataset",
"kwargs": {
"max_seq_len": 512,
"bert_model": "aubmindlab/bert-base-arabertv2"
}
},
"optimizer": {
"fn": "torch.optim.AdamW",
"kwargs": {
"lr": 1e-05
}
},
"lr_scheduler": {
"fn": "torch.optim.lr_scheduler.ExponentialLR",
"kwargs": {
"gamma": 1
}
},
"loss": {
"fn": "torch.nn.CrossEntropyLoss",
"kwargs": {}
}
}