{ "architectures": [ "transformerModel" ], "auto_map": { "AutoConfig": "modeling_IQtransformer.transformerConfig", "AutoModelForCausalLM": "modeling_IQtransformer.transformerModel" }, "dropout": 0.1, "ffn_num_hiddens": 64, "ffn_num_input": 32, "key_size": 32, "model_type": "IQsignal_transformer", "norm_shape": [ 32 ], "num_heads": 4, "num_hiddens": 32, "num_layers": 2, "query_size": 32, "torch_dtype": "float32", "transformers_version": "4.45.2", "value_size": 32, "vocab_size": 32 }