ariG23498 HF staff committed on
Commit
22dc319
·
1 Parent(s): 2b310a5

remove flash

Browse files
Files changed (2) hide show
  1. app.py +0 -1
  2. requirements.txt +0 -1
app.py CHANGED
@@ -14,7 +14,6 @@ model = AutoModelForCausalLM.from_pretrained(
14
  device_map="cuda",
15
  torch_dtype="auto",
16
  trust_remote_code=True,
17
- attn_implementation='flash_attention_2',
18
  ).cuda()
19
 
20
  generation_config = GenerationConfig.from_pretrained(model_path)
 
14
  device_map="cuda",
15
  torch_dtype="auto",
16
  trust_remote_code=True,
 
17
  ).cuda()
18
 
19
  generation_config = GenerationConfig.from_pretrained(model_path)
requirements.txt CHANGED
@@ -7,6 +7,5 @@ scipy
7
  soundfile
8
  pillow
9
  accelerate
10
- flash-attn
11
  transformers
12
  backoff
 
7
  soundfile
8
  pillow
9
  accelerate
 
10
  transformers
11
  backoff