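# Pinned Python dependencies; install with `pip install -r requirements.txt`.
# The flash-attn wheel below is a prebuilt binary for CUDA 11.8, Python 3.10
# (cp310), Linux x86_64; pick a matching wheel from the flash-attention
# releases page if your CUDA version, Python version, or platform differs.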
https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl 
trimesh==4.2.3
accelerate==0.28.0
mesh2sdf==1.1.0
einops==0.7.0
einx==0.1.3
optimum==1.18.0
omegaconf==2.3.0
opencv-python==4.9.0.80
transformers==4.39.3
numpy==1.26.4
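# The packages below are unpinned; pin exact versions if you need a fully
# reproducible environment.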
huggingface_hub
spaces
gradio