# LightDiffusion-Next — docker-compose.yml
# Snapshot: "Deploy ZeroGPU Gradio Space" (commit b701455)
services:
  lightdiffusion:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        # Specify target GPU architectures for CUDA extension builds.
        # 8.0: A100, 8.6: RTX 30xx, 8.9: RTX 40xx, 9.0: H100, 12.0: RTX 50xx (Blackwell)
        # Customize based on your GPU: TORCH_CUDA_ARCH_LIST: "12.0" for RTX 50xx only
        TORCH_CUDA_ARCH_LIST: "8.0;8.6;8.9;9.0;12.0"
        # Optional extras, disabled by default. Quoted so the Dockerfile
        # receives the literal strings "0"/"1" rather than YAML integers.
        INSTALL_STABLE_FAST: "0"
        INSTALL_OLLAMA: "0"
        INSTALL_SAGEATTENTION: "0"
        INSTALL_SPARGEATTN: "0"
    ports:
      # Quoted to avoid YAML's sexagesimal number trap on port mappings.
      - "7860:7860"  # FastAPI backend serving the built React UI
    volumes:
      # Mount output directory to persist generated images
      - ./output:/app/output
      # Mount checkpoints directory for model files
      - ./include/checkpoints:/app/include/checkpoints
      # Mount other model directories
      - ./include/loras:/app/include/loras
      - ./include/embeddings:/app/include/embeddings
      - ./include/ESRGAN:/app/include/ESRGAN
      - ./include/yolos:/app/include/yolos
    environment:
      - PORT=7860
      - CUDA_VISIBLE_DEVICES=0
      - CUDA_HOME=/usr/local/cuda
      - PROMPT_ENHANCER_MODEL=qwen3:0.6b
    deploy:
      resources:
        reservations:
          # Reserve one NVIDIA GPU; requires the NVIDIA Container Toolkit
          # and a Compose implementation honoring device reservations.
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    restart: unless-stopped
    # Keep STDIN open and allocate a TTY for interactive use (docker attach).
    stdin_open: true
    tty: true