Upload config.json with huggingface_hub
config.json  +3 -2
CHANGED
@@ -1,9 +1,10 @@
 {
   "architectures": [
-    "
+    "LlamaForCausalLMEagle"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
+  "attn_implementation": "flash_attention_2",
   "bos_token_id": 128000,
   "eos_token_id": [
     128001,
@@ -26,7 +27,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.51.3",
   "use_cache": true,
   "vocab_size": 128256
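For reference, a minimal sketch of how a commit like this can be produced with huggingface_hub's upload_file API; the repo id below is a placeholder, not the actual repository:

# Sketch: push the edited config.json to the Hub; repo_id is hypothetical.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="config.json",       # local file containing the edited fields
    path_in_repo="config.json",          # destination path inside the model repo
    repo_id="your-username/your-model",  # placeholder repo id
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)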