bezzam (HF Staff) committed
Commit 0c08558 · verified · 1 Parent(s): 1e43467

Upload LlasaForCausalLM

Files changed (1)
  1. config.json +0 -11
config.json CHANGED

@@ -5,23 +5,12 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "codebook_size": 65536,
   "eos_token_id": 128261,
   "head_dim": 64,
   "hidden_act": "silu",
   "hidden_size": 2048,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
-  "llasa_start_end_tokens": {
-    "speech_generation_end": "<|SPEECH_GENERATION_END|>",
-    "speech_generation_start": "<|SPEECH_GENERATION_START|>",
-    "speech_understanding_end": "<|SPEECH_UNDERSTANDING_END|>",
-    "speech_understanding_start": "<|SPEECH_UNDERSTANDING_START|>",
-    "text_generation_end": "<|TEXT_GENERATION_END|>",
-    "text_generation_start": "<|TEXT_GENERATION_START|>",
-    "text_understanding_end": "<|TEXT_UNDERSTANDING_END|>",
-    "text_understanding_start": "<|TEXT_UNDERSTANDING_START|>"
-  },
   "max_length": null,
   "max_position_embeddings": 131072,
   "mlp_bias": false,