# Python package requirements for this app.
transformers>=4.45.0
accelerate>=0.33.0
qwen-vl-utils>=0.0.8
gradio>=5.49.1
spaces>=0.24.0
pillow
torchvision

# Optional: FlashAttention v2 for faster attention on compatible Linux CUDA GPUs.
# This installs only on 64-bit Linux. It will be skipped on macOS/Windows/ARM.
flash-attn; platform_system == "Linux" and platform_machine == "x86_64"