# Build Tools
ninja==1.11.1.1

# Core ML/Deep Learning
transformers==4.46.0
diffusers==0.30.0
accelerate==1.1.1
pytorch-lightning==1.9.5
huggingface-hub==0.30.2
safetensors==0.4.4

# Scientific Computing
numpy==2.1.2
scipy==1.14.1
einops==0.8.0
pandas==2.2.2

# 3D Mesh Processing
trimesh==4.4.7
pymeshlab==2022.2.post3

# Configuration Management
omegaconf==2.3.0
pyyaml==6.0.2
configargparse==1.7

# Utilities
tqdm==4.66.5

# GPU Computing (requires CUDA)
cupy-cuda12x==13.4.1

# ONNX Runtime
onnxruntime==1.16.3
torchmetrics==1.6.0

timm
numba
fpsample
scikit-learn
addict
scikit-image

# sonata
spconv-cu126
torch-scatter -f https://data.pyg.org/whl/torch-2.8.0+cu126.html
git+https://github.com/Dao-AILab/flash-attention.git