{ "architectures": [ "GPTModel" ], "attn_pdrop": 0.1, "block_size": 1024, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "gpt", "n_embd": 768, "n_head": 12, "n_layer": 12, "resid_pdrop": 0.1, "torch_dtype": "float32", "transformers_version": "4.48.0.dev0", "vocab_size": 50257 }