File size: 352 Bytes
eb73104
{
  "architectures": [
    "Lake1ForAnyToAny"
  ],
  "vocab_size": 20064,
  "hidden_size": 1024,
  "num_hidden_layers": 2,
  "num_attention_heads": 8,
  "embd_pdrop": 0.1,
  "use_moe": false,
  "num_experts": 4,
  "expert_hidden_size": 4096,
  "dropout": 0.1,
  "model_type": "multi_modality",
  "torch_dtype": "float16"
}
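
For reference, a minimal sketch of reading this config with plain Python and sanity-checking a few of its fields. The local path `config.json` is an assumption, and the field names are taken directly from the file above; since `model_type` is `"multi_modality"` and the architecture name is custom, loading actual weights would typically require the repository's own modeling code rather than a stock library class.

```python
# Minimal sketch: read the config file shown above and inspect a few fields.
# Assumes the file has been downloaded locally as "config.json".
import json

with open("config.json") as f:
    cfg = json.load(f)

# Basic transformer shape parameters from the config.
print(cfg["architectures"])        # ["Lake1ForAnyToAny"]
print(cfg["hidden_size"])          # 1024
print(cfg["num_attention_heads"])  # 8

# The hidden size should divide evenly across the attention heads.
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0

# MoE-related fields are present, but the mixture-of-experts path is disabled.
if not cfg["use_moe"]:
    print("dense FFN; num_experts / expert_hidden_size are unused")
```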