Upload folder using huggingface_hub
README.md CHANGED

@@ -19,13 +19,13 @@ import transformers
 from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, GenerationConfig, pipeline, set_seed
 
 model_id = "meta-llama/Meta-Llama-3.1-70B-Instruct"
-repo_id = "yujiepan/meta-llama-3.1-tiny-random"
+repo_id = "yujiepan/meta-llama-3.1-tiny-random-hidden128"
 save_path = f"/tmp/{repo_id}"
 
 config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
 config._name_or_path = model_id
-config.hidden_size =
-config.intermediate_size =
+config.hidden_size = 128
+config.intermediate_size = 256
 config.num_attention_heads = 2
 config.num_key_value_heads = 1
 config.num_hidden_layers = 2
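
The changed lines point the upload at a new repo name and set `hidden_size = 128` and `intermediate_size = 256`, shrinking the Llama-3.1-70B architecture down to a 2-layer, 128-wide random toy model. The diff only covers lines 19-31 of the README script, so the following is a minimal sketch of how such a shrunken config is typically turned into a saved checkpoint: the seed, the bfloat16 dtype, and the save/smoke-test steps are assumptions, not the README's actual code (and the gated meta-llama repo requires authenticated access):

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed

model_id = "meta-llama/Meta-Llama-3.1-70B-Instruct"
repo_id = "yujiepan/meta-llama-3.1-tiny-random-hidden128"
save_path = f"/tmp/{repo_id}"

# Shrink the 70B config to a tiny model, mirroring the diff above.
config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
config._name_or_path = model_id
config.hidden_size = 128
config.intermediate_size = 256
config.num_attention_heads = 2
config.num_key_value_heads = 1  # keeps grouped-query attention
config.num_hidden_layers = 2

# Randomly initialize weights from the shrunken config and save them
# alongside the original tokenizer. Seed and dtype are assumptions.
set_seed(42)
model = AutoModelForCausalLM.from_config(config, torch_dtype=torch.bfloat16)
model.save_pretrained(save_path)
AutoTokenizer.from_pretrained(model_id).save_pretrained(save_path)

# Smoke test: the tiny model should generate (random) tokens.
pipe = pipeline("text-generation", model=save_path)
print(pipe("Hello", max_new_tokens=8))
```

Because only the layer count and widths are touched, the checkpoint keeps the original vocabulary and RoPE settings, so it loads through the standard Llama code path while staying small enough for CI-style tests.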