Spaces:
inQuestAI
/
Runtime error

fffiloni committed on
Commit
4594c83
·
verified ·
1 Parent(s): b444c90

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -6
app.py CHANGED
@@ -12,12 +12,21 @@ cuda_lib_path = "/usr/local/cuda/lib64"
12
  os.environ['PATH'] = f"{cuda_bin_path}:{os.environ.get('PATH', '')}"
13
  os.environ['LD_LIBRARY_PATH'] = f"{cuda_lib_path}:{os.environ.get('LD_LIBRARY_PATH', '')}"
14
 
15
- # Install flash attention
16
- subprocess.run(
17
- "pip install flash-attn --no-build-isolation",
18
- env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
19
- shell=True,
20
- )
 
 
 
 
 
 
 
 
 
21
 
22
  from huggingface_hub import snapshot_download
23
 
 
12
  os.environ['PATH'] = f"{cuda_bin_path}:{os.environ.get('PATH', '')}"
13
  os.environ['LD_LIBRARY_PATH'] = f"{cuda_lib_path}:{os.environ.get('LD_LIBRARY_PATH', '')}"
14
 
15
+ # Install required package
16
+ def install_flash_attn():
17
+ try:
18
+ print("Installing flash-attn...")
19
+ subprocess.run(
20
+ ["pip", "install", "flash-attn", "--no-build-isolation"],
21
+ check=True
22
+ )
23
+ print("flash-attn installed successfully!")
24
+ except subprocess.CalledProcessError as e:
25
+ print(f"Failed to install flash-attn: {e}")
26
+ exit(1)
27
+
28
+ # Install flash-attn
29
+ install_flash_attn()
30
 
31
  from huggingface_hub import snapshot_download
32