---
# Docker Compose for Unraid
#
# Access at http://YOUR_UNRAID_IP:8580
#
# ============================================
# CONFIGURE THESE PATHS FOR YOUR UNRAID SETUP
# ============================================
# Edit the left side of the colon (:) for each volume mount
#
# DATABASE_PATH: Where to store the SQLite database
# IMAGES_PATH: Where to store downloaded images (can be large, 100GB+)
# EXPORTS_PATH: Where to store generated export zip files
# IMPORTS_PATH: Where to place images for bulk import (source/species/images)
# LOGS_PATH: Where to store scraper log files for debugging

services:
  # FastAPI application server (served through nginx on port 8580).
  backend:
    build:
      context: /mnt/user/appdata/PlantGuideScraper/backend
      dockerfile: Dockerfile
    container_name: plant-scraper-backend
    restart: unless-stopped
    volumes:
      # Source mounted read-only; data paths below are writable.
      - /mnt/user/appdata/PlantGuideScraper/backend:/app:ro
      # === CONFIGURABLE DATA PATHS ===
      - /mnt/user/downloads/PlantGuideDocker/database:/data/db  # DATABASE_PATH
      - /mnt/user/downloads/PlantGuideDocker/images:/data/images  # IMAGES_PATH
      - /mnt/user/downloads/PlantGuideDocker/exports:/data/exports  # EXPORTS_PATH
      - /mnt/user/downloads/PlantGuideDocker/imports:/data/imports  # IMPORTS_PATH
      - /mnt/user/downloads/PlantGuideDocker/logs:/data/logs  # LOGS_PATH
    environment:
      # Four slashes: sqlite:/// + absolute path /data/db/plants.sqlite
      - DATABASE_URL=sqlite:////data/db/plants.sqlite
      - REDIS_URL=redis://plant-scraper-redis:6379/0
      - IMAGES_PATH=/data/images
      - EXPORTS_PATH=/data/exports
      - IMPORTS_PATH=/data/imports
      - LOGS_PATH=/data/logs
    depends_on:
      - redis
    command: uvicorn app.main:app --host 0.0.0.0 --port 8000
    networks:
      - plant-scraper

  # Celery worker + beat scheduler; shares the backend image and data mounts.
  celery:
    build:
      context: /mnt/user/appdata/PlantGuideScraper/backend
      dockerfile: Dockerfile
    container_name: plant-scraper-celery
    restart: unless-stopped
    volumes:
      - /mnt/user/appdata/PlantGuideScraper/backend:/app:ro
      # === CONFIGURABLE DATA PATHS (must match backend) ===
      - /mnt/user/downloads/PlantGuideDocker/database:/data/db  # DATABASE_PATH
      - /mnt/user/downloads/PlantGuideDocker/images:/data/images  # IMAGES_PATH
      - /mnt/user/downloads/PlantGuideDocker/exports:/data/exports  # EXPORTS_PATH
      - /mnt/user/downloads/PlantGuideDocker/imports:/data/imports  # IMPORTS_PATH
      - /mnt/user/downloads/PlantGuideDocker/logs:/data/logs  # LOGS_PATH
    environment:
      # Must match the backend service's environment exactly.
      - DATABASE_URL=sqlite:////data/db/plants.sqlite
      - REDIS_URL=redis://plant-scraper-redis:6379/0
      - IMAGES_PATH=/data/images
      - EXPORTS_PATH=/data/exports
      - IMPORTS_PATH=/data/imports
      - LOGS_PATH=/data/logs
    depends_on:
      - redis
    command: celery -A app.workers.celery_app worker --beat --loglevel=info --concurrency=4
    networks:
      - plant-scraper

  # Message broker / result backend for Celery.
  redis:
    image: redis:7-alpine
    container_name: plant-scraper-redis
    restart: unless-stopped
    volumes:
      - /mnt/user/appdata/PlantGuideScraper/redis:/data
    networks:
      - plant-scraper

  # Vite dev server; node_modules kept in a named volume so the host
  # bind mount does not shadow installed dependencies.
  frontend:
    build:
      context: /mnt/user/appdata/PlantGuideScraper/frontend
      dockerfile: Dockerfile
    container_name: plant-scraper-frontend
    restart: unless-stopped
    volumes:
      - /mnt/user/appdata/PlantGuideScraper/frontend:/app
      - plant-scraper-node-modules:/app/node_modules
    environment:
      # NOTE(review): deliberately empty — presumably the frontend uses
      # same-origin requests that nginx proxies to the backend; confirm
      # against nginx.conf before changing.
      - VITE_API_URL=
    command: npm run dev -- --host
    networks:
      - plant-scraper

  # Reverse proxy: single entry point on host port 8580.
  nginx:
    image: nginx:alpine
    container_name: plant-scraper-nginx
    restart: unless-stopped
    ports:
      # Quoted so YAML does not misparse the colon-separated mapping.
      - "8580:80"
    volumes:
      - /mnt/user/appdata/PlantGuideScraper/nginx/nginx.conf:/etc/nginx/nginx.conf:ro
    depends_on:
      - backend
      - frontend
    networks:
      - plant-scraper

networks:
  plant-scraper:
    name: plant-scraper

volumes:
  # Named volume holding the frontend's node_modules (see frontend service).
  plant-scraper-node-modules: