Coobiw committed on
Commit 7e54849 · verified · 1 Parent(s): 8040fe5

add flash-attn config

Files changed (1):
  config.json  +1 -0
config.json CHANGED
@@ -7,6 +7,7 @@
     "AutoModel": "modeling_chartmoe.ChartMoEForCausalLM",
     "AutoModelForCausalLM": "modeling_chartmoe.ChartMoEForCausalLM"
   },
+  "attn_implementation": "flash_attention_2",
   "bias": false,
   "bos_token_id": 1,
   "eos_token_id": 2,