Upload folder using huggingface_hub
- config.json +2 -2
- model.safetensors +2 -2
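The commit title above is the default message written by huggingface_hub's `upload_folder` helper. A minimal sketch of the kind of call that produces such a commit is shown below; the local folder path and repo id are placeholders, not values taken from this commit.

```python
# Sketch: push a local model folder to the Hub with huggingface_hub.
# "./my-model" and "username/my-model" are placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./my-model",     # local directory containing config.json, model.safetensors, ...
    repo_id="username/my-model",  # placeholder repo id
    repo_type="model",
    # commit_message defaults to "Upload folder using huggingface_hub"
)
```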
config.json CHANGED

@@ -24,7 +24,7 @@
   "mamba_conv_bias": true,
   "mamba_d_conv": 4,
   "mamba_d_state": 16,
-  "mamba_dt_rank":
+  "mamba_dt_rank": 256,
   "mamba_expand": 2,
   "mamba_inner_layernorms": true,
   "mamba_proj_bias": false,

@@ -33,7 +33,7 @@
   "num_attention_heads": 4,
   "num_experts": 16,
   "num_experts_per_tok": 2,
-  "num_hidden_layers":
+  "num_hidden_layers": 16,
   "num_key_value_heads": 2,
   "output_router_logits": false,
   "pad_token_id": 0,
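As a quick check of the two updated fields, one could fetch the new config.json and read them back; a sketch follows, with "username/my-model" standing in for the repo id, which this commit page does not show.

```python
# Sketch: download the updated config.json and confirm the changed values.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="username/my-model", filename="config.json")  # placeholder repo id
with open(path) as f:
    cfg = json.load(f)

assert cfg["mamba_dt_rank"] == 256
assert cfg["num_hidden_layers"] == 16
```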
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b6ab01c6269c51bc084de15f4b817241cfa38699b5775f4f290889e9ac8f6ef9
+size 1274744
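The model.safetensors entry is a Git LFS pointer: the oid is the SHA-256 of the actual file contents and the size is its length in bytes. A minimal sketch for verifying a downloaded copy against this pointer is shown below; the local path is a placeholder.

```python
# Sketch: check a local model.safetensors against the LFS pointer above.
import hashlib
import os

path = "model.safetensors"  # placeholder local path

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == "b6ab01c6269c51bc084de15f4b817241cfa38699b5775f4f290889e9ac8f6ef9"
assert os.path.getsize(path) == 1274744
```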