torch==2.4.0
torchvision
deepspeed==0.16.9
transformers==4.44.2
tokenizers
attrdict
diffusers==0.31.0
sentencepiece==0.1.99
shortuuid
accelerate
peft
bitsandbytes
requests
httpx==0.23.3
uvicorn 
fastapi
einops==0.6.1
einops-exts==0.0.4
timm==0.9.12
tensorboardX
tensorboard
tiktoken
loguru
pydantic==2.11.5
pydantic_core==2.33.2
markdown2[all]
numpy
scikit-learn==1.2.2
einx==0.3.0
Pillow==9.0.1
tenacity
sqlitedict
evaluate
sacrebleu
hf_transfer
scikit-image
torch_fidelity
imagesize
# flash attention
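# Note: the prebuilt wheel below targets CUDA 12.3, PyTorch 2.4, Python 3.10 (cp310), Linux x86_64
# (see the wheel filename); on other environments flash-attn may need to be built from source
# instead, e.g. `pip install flash-attn --no-build-isolation`.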
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl