services:
  db:
    image: postgres:15
    environment:
      POSTGRES_DB: cve_sigma_db
      POSTGRES_USER: cve_user
      POSTGRES_PASSWORD: cve_password
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U cve_user -d cve_sigma_db"]
      interval: 30s
      timeout: 10s
      retries: 3

  backend:
    build: ./backend
    ports:
      - "8000:8000"
    environment:
      DATABASE_URL: postgresql://cve_user:cve_password@db:5432/cve_sigma_db
      CELERY_BROKER_URL: redis://redis:6379/0
      CELERY_RESULT_BACKEND: redis://redis:6379/0
      NVD_API_KEY: ${NVD_API_KEY:-}
      GITHUB_TOKEN: ${GITHUB_TOKEN}
      OPENAI_API_KEY: ${OPENAI_API_KEY:-}
      ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
      OLLAMA_BASE_URL: ${OLLAMA_BASE_URL:-http://ollama:11434}
      LLM_PROVIDER: ${LLM_PROVIDER:-ollama}
      LLM_MODEL: ${LLM_MODEL:-llama3.2}
      LLM_ENABLED: ${LLM_ENABLED:-true}
      FINETUNED_MODEL_PATH: ${FINETUNED_MODEL_PATH:-/app/models/sigma_llama_finetuned}
      HUGGING_FACE_TOKEN: ${HUGGING_FACE_TOKEN}
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_started
      ollama-setup:
        condition: service_completed_successfully
    volumes:
      - ./backend:/app
      - ./github_poc_collector:/github_poc_collector
      - ./exploit-db-mirror:/app/exploit-db-mirror
      - ./models:/app/models
    command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload

  frontend:
    build: ./frontend
    ports:
      - "3000:3000"
    environment:
      REACT_APP_API_URL: http://localhost:8000
    volumes:
      - ./frontend:/app
      - /app/node_modules
    command: npm start

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data

  ollama:
    image: ollama/ollama:latest
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    environment:
      - OLLAMA_HOST=0.0.0.0
    restart: unless-stopped
    deploy:
      resources:
        limits:
          memory: 5G
        reservations:
          memory: 3G

  ollama-setup:
    build: ./backend
    depends_on:
      - ollama
    environment:
      OLLAMA_BASE_URL: http://ollama:11434
      LLM_MODEL: llama3.2
    volumes:
      - ./backend:/app
    command: python setup_ollama_with_sigma.py
    restart: "no"

  initial-setup:
    build: ./backend
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_started
      celery-worker:
        condition: service_healthy
    environment:
      DATABASE_URL: postgresql://cve_user:cve_password@db:5432/cve_sigma_db
      CELERY_BROKER_URL: redis://redis:6379/0
      CELERY_RESULT_BACKEND: redis://redis:6379/0
    volumes:
      - ./backend:/app
    command: python initial_setup.py
    restart: "no"

  celery-worker:
    build: ./backend
    command: celery -A celery_config worker --loglevel=info --concurrency=4
    environment:
      DATABASE_URL: postgresql://cve_user:cve_password@db:5432/cve_sigma_db
      CELERY_BROKER_URL: redis://redis:6379/0
      CELERY_RESULT_BACKEND: redis://redis:6379/0
      NVD_API_KEY: ${NVD_API_KEY:-}
      GITHUB_TOKEN: ${GITHUB_TOKEN}
      OPENAI_API_KEY: ${OPENAI_API_KEY:-}
      ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
      OLLAMA_BASE_URL: ${OLLAMA_BASE_URL:-http://ollama:11434}
      LLM_PROVIDER: ${LLM_PROVIDER:-ollama}
      LLM_MODEL: ${LLM_MODEL:-llama3.2}
      LLM_ENABLED: ${LLM_ENABLED:-true}
      FINETUNED_MODEL_PATH: ${FINETUNED_MODEL_PATH:-/app/models/sigma_llama_finetuned}
      HUGGING_FACE_TOKEN: ${HUGGING_FACE_TOKEN}
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_started
      ollama-setup:
        condition: service_completed_successfully
    volumes:
      - ./backend:/app
      - ./github_poc_collector:/github_poc_collector
      - ./exploit-db-mirror:/app/exploit-db-mirror
      - ./models:/app/models
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "celery", "-A", "celery_config", "inspect", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3
  celery-beat:
    build: ./backend
    command: celery -A celery_config beat --loglevel=info --pidfile=/tmp/celerybeat.pid
    environment:
      DATABASE_URL: postgresql://cve_user:cve_password@db:5432/cve_sigma_db
      CELERY_BROKER_URL: redis://redis:6379/0
      CELERY_RESULT_BACKEND: redis://redis:6379/0
      NVD_API_KEY: ${NVD_API_KEY:-}
      GITHUB_TOKEN: ${GITHUB_TOKEN}
      OPENAI_API_KEY: ${OPENAI_API_KEY:-}
      ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
      OLLAMA_BASE_URL: ${OLLAMA_BASE_URL:-http://ollama:11434}
      LLM_PROVIDER: ${LLM_PROVIDER:-ollama}
      LLM_MODEL: ${LLM_MODEL:-llama3.2}
      LLM_ENABLED: ${LLM_ENABLED:-true}
      FINETUNED_MODEL_PATH: ${FINETUNED_MODEL_PATH:-/app/models/sigma_llama_finetuned}
      HUGGING_FACE_TOKEN: ${HUGGING_FACE_TOKEN}
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_started
      celery-worker:
        condition: service_healthy
    volumes:
      - ./backend:/app
      - ./github_poc_collector:/github_poc_collector
      - ./exploit-db-mirror:/app/exploit-db-mirror
      - ./models:/app/models
    restart: unless-stopped

  flower:
    build: ./backend
    command: celery -A celery_config flower --port=5555
    ports:
      - "5555:5555"
    environment:
      CELERY_BROKER_URL: redis://redis:6379/0
      CELERY_RESULT_BACKEND: redis://redis:6379/0
    depends_on:
      redis:
        condition: service_started
      celery-worker:
        condition: service_healthy
    restart: unless-stopped

volumes:
  postgres_data:
  redis_data:
  ollama_data:
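
# A minimal sketch of the .env file this compose file reads its ${VAR} substitutions
# from, kept as comments so the YAML stays valid. Only the variable names referenced
# above are listed; every value shown is a placeholder, not a real credential, and any
# key left unset falls back to the ${VAR:-default} defined in the service environments.
#
#   NVD_API_KEY=                                              # optional, defaults to empty
#   GITHUB_TOKEN=<your GitHub token>                          # no default; required by backend/celery services
#   OPENAI_API_KEY=                                           # optional
#   ANTHROPIC_API_KEY=                                        # optional
#   HUGGING_FACE_TOKEN=<your Hugging Face token>              # no default
#   OLLAMA_BASE_URL=http://ollama:11434
#   LLM_PROVIDER=ollama
#   LLM_MODEL=llama3.2
#   LLM_ENABLED=true
#   FINETUNED_MODEL_PATH=/app/models/sigma_llama_finetuned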