import os

from transformers import WhisperProcessor, WhisperForConditionalGeneration

# Ensure the cache directory exists
cache_dir = "/app/.cache"
os.makedirs(cache_dir, exist_ok=True)

print("Downloading Whisper model and processor...")
try:
    processor = WhisperProcessor.from_pretrained("openai/whisper-large-v3", cache_dir=cache_dir)
    model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-large-v3", cache_dir=cache_dir)
    print("✓ Model and processor downloaded successfully!")
except Exception as e:
    print(f"Failed to download model: {e}")
    raise