albert-base-v2 / config.json
{
  "vocab_size": 30000,
  "embedding_size": 128,
  "num_hidden_layers": 12,
  "attention_head_size": 64,
  "num_attention_heads": 12,
  "intermediate_size": 3072,
  "embedding_projection_size": 768,
  "hidden_act": "gelu",
  "intermediate_act": "gelu",
  "hidden_dropout_prob": 0,
  "attention_probs_dropout_prob": 0,
  "max_position_embeddings": 512,
  "type_vocab_size": 2,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-12,
  "position_embedding_type": "absolute",
  "num_hidden_groups": 1,
  "positional_buckets": null,
  "bidirectional": null,
  "cls_token_id": null,
  "sep_token_id": null,
  "decoder_start_token_id": null,
  "pad_token_id": null,
  "bos_token_id": null,
  "eos_token_id": null
}
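For reference, a minimal sketch of reading this file with Python's standard json module; the local filename "config.json" is an assumption. It also checks two relationships visible in the values above: the total attention width (num_attention_heads × attention_head_size = 12 × 64) equals embedding_projection_size (768), and embedding_size (128) is smaller than that projection width, reflecting ALBERT's factorized embedding parameterization.

```python
import json

# Minimal sketch: load this config with the standard library and verify a
# couple of internal consistencies. The path "config.json" assumes the file
# has been saved locally under that name.
with open("config.json") as f:
    cfg = json.load(f)

# Total attention width should match the projected hidden width:
# 12 heads * 64 per head = 768.
assert cfg["num_attention_heads"] * cfg["attention_head_size"] == cfg["embedding_projection_size"]

# ALBERT factorizes the embeddings: a small 128-dim lookup table is
# projected up to the 768-dim width used by the transformer layers.
assert cfg["embedding_size"] < cfg["embedding_projection_size"]

print(f"{cfg['num_hidden_layers']} layers, vocab of {cfg['vocab_size']}")
```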