|
from transformers import PretrainedConfig
|
|
|
|
class transformerConfig(PretrainedConfig):
    """Configuration class for the IQ-signal transformer model.

    Stores the hyperparameters used to build the model's embedding,
    multi-head attention, feed-forward, and layer-norm components.
    Extra keyword arguments are forwarded to ``PretrainedConfig``
    (e.g. ``id2label``, ``pad_token_id``).

    Args:
        vocab_size: Size of the input vocabulary / embedding table.
        key_size: Dimensionality of attention key projections.
        query_size: Dimensionality of attention query projections.
        value_size: Dimensionality of attention value projections.
        num_hiddens: Hidden size of the model.
        norm_shape: Shape parameter passed to the layer-norm modules.
        ffn_num_input: Input dimensionality of the position-wise FFN.
        ffn_num_hiddens: Hidden dimensionality of the position-wise FFN.
        num_heads: Number of attention heads.
        num_layers: Number of stacked encoder/decoder layers.
        dropout: Dropout probability (0.0-1.0).
    """

    # Identifier under which this configuration is registered with the
    # Hugging Face auto-class machinery.
    model_type = "IQsignal_transformer"

    def __init__(
        self,
        vocab_size: int = 32,
        key_size: int = 32,
        query_size: int = 32,
        value_size: int = 32,
        num_hiddens: int = 32,
        norm_shape: int = 32,
        ffn_num_input: int = 32,
        ffn_num_hiddens: int = 64,
        num_heads: int = 4,
        num_layers: int = 2,
        # Fixed annotation: was `int`, but the default (0.1) and the
        # semantics of a dropout rate are float.
        dropout: float = 0.1,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.key_size = key_size
        self.query_size = query_size
        self.value_size = value_size
        self.num_hiddens = num_hiddens
        self.norm_shape = norm_shape
        self.ffn_num_input = ffn_num_input
        self.ffn_num_hiddens = ffn_num_hiddens
        self.num_heads = num_heads
        self.num_layers = num_layers
        self.dropout = dropout

        # Forward any remaining standard config kwargs to the base class
        # (standard pattern for custom Hugging Face configurations).
        super().__init__(**kwargs)