omegaconf
torch==2.0.1
torchvision==0.15.2
transformers==4.44.0
sentencepiece==0.1.99
accelerate==0.33.0
einops==0.6.1
einops-exts==0.0.4
timm==0.6.13
https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.5/flash_attn-2.3.5+cu117torch2.0cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
scipy
gradio
monotonic_align
librosa==0.8.0
phonemizer
unidecode
hydra-core==1.3.2
pytorch_lightning==1.1.0
wget
wrapt
onnx
frozendict
inflect
braceexpand
webdataset
torch_stft
sox
editdistance
numpy==1.23.5