{
  "encoders_hidden_size": 768,
  "ignore_index": -100,
  "llm_name": "bilalfaye/gpt",
  "max_inference_batch_size": 32,
  "max_seq_len": 256,
  "modal_tokens": 30,
  "multiple_of": 256,
  "n_head": 8,
  "n_up_layers": 8,
  "up_dim": 768
}