ykhwang committed
Commit 82e3280 · 1 parent: 5e08339

Upload LlamaForCausalLM

Files changed (2):
  1. config.json +2 -2
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "42dot-plm-1.3b",
+  "_name_or_path": "llama_1.3b_enko_v230free_4k_300b",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -9,7 +9,7 @@
   "hidden_size": 2048,
   "initializer_range": 0.01,
   "intermediate_size": 5632,
-  "max_position_embeddings": 8192,
+  "max_position_embeddings": 4096,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 24,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4e2ac90def8d8d6e8c2a674d0588e9e6e8f2b58f1c788b1a944dda45468c9afc
+oid sha256:9cbe77e96064b09b39c6a8ffbacd8fb0d83237119eb65dac5a8fed57cd325b54
 size 5757165853
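
This is a Git LFS pointer file: only the SHA-256 oid of the ~5.7 GB weight blob changed, while the size stayed identical. As a minimal sketch (the local file path is an assumption), a downloaded copy can be checked against the new oid like this:

```python
# Minimal sketch: verify a downloaded weight file against the sha256 oid
# recorded in the Git LFS pointer above. The local path is an assumption.
import hashlib

EXPECTED_OID = "9cbe77e96064b09b39c6a8ffbacd8fb0d83237119eb65dac5a8fed57cd325b54"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so a ~5.7 GB checkpoint fits in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("pytorch_model.bin") == EXPECTED_OID
```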