{
  "activation": "gelu",
  "classification": true,
  "d_ff": 512,
  "d_model": 128,
  "dropout": 0.14904317667318695,
  "e_layers": 8,
  "enc_in": 9,
  "factor": 1,
  "n_heads": 4,
  "num_classes": 10,
  "output_attention": false,
  "seq_len": 200
}