omegaconf
# torch
# torchvision
transformers==4.44.2
sentencepiece==0.1.99
accelerate==0.33.0
einops==0.6.1
einops-exts==0.0.4
timm==0.6.13
# flash_attn
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
scipy
# gradio
monotonic_align
librosa==0.8.0
phonemizer
unidecode
hydra-core==1.3.2
pytorch_lightning==1.1.0
wget
wrapt
onnx
frozendict
inflect
braceexpand
webdataset
torch_stft
sox
editdistance
numpy==1.23.5