liuyang committed on
Commit 4906209 · 1 Parent(s): e91cdbe

requirements

Files changed (1)
  1. requirements.txt +2 -2
requirements.txt CHANGED
@@ -1,5 +1,5 @@
 # 1. Do NOT pin torch/torchaudio here – keep the CUDA builds that come with the image
-torch==2.3.1
+torch==2.4.0
 transformers==4.48.0
 # Removed flash-attention since faster-whisper handles this internally
 # https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.0.8/flash_attn-2.7.4.post1+cu126torch2.4-cp310-cp310-linux_x86_64.whl
@@ -18,4 +18,4 @@ librosa>=0.10.0
 soundfile>=0.12.0
 ffmpeg-python>=0.2.0
 requests>=2.28.0
-nvidia-cudnn-cu12==9.1.1.17 # any 9.1.x that pip can find is fine
+nvidia-cudnn-cu12==9.1.0.70 # any 9.1.x that pip can find is fine
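
A minimal sanity check after pip install -r requirements.txt, sketched on the assumption that the image ships a CUDA build of torch (the exact +cuXXX suffix comes from the image, not from this file):

import torch

# torch should report the 2.4.0 CUDA build that came with the image,
# since the comment above says not to pin torch/torchaudio over it.
print("torch:", torch.__version__)                  # e.g. "2.4.0+cu121"; CUDA tag varies by image
print("CUDA available:", torch.cuda.is_available())

# cuDNN is reported as an integer code (e.g. 90100 for 9.1.x); any 9.1.x
# wheel resolved from nvidia-cudnn-cu12 should be acceptable here.
print("cuDNN:", torch.backends.cudnn.version())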