# NVD API Configuration
# Get your free API key at: https://nvd.nist.gov/developers/request-an-api-key
NVD_API_KEY=your_nvd_api_key_here
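# Note: without a key the NVD API is heavily rate-limited (roughly 5 requests per
# 30-second window vs. 50 with a key), so a key is strongly recommended.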

# GitHub API Configuration (Optional - for exploit analysis)
# Get your personal access token at: https://github.com/settings/tokens
# Only needs "public_repo" scope for searching public repositories
GITHUB_TOKEN=your_github_token_here
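# Note: without a token the GitHub search API allows far fewer requests
# (about 10 search queries per minute vs. 30 when authenticated).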

# LLM API Configuration (Optional - for enhanced SIGMA rule generation)
# Choose your preferred LLM provider and configure the corresponding API key

# OpenAI Configuration
# Get your API key at: https://platform.openai.com/api-keys
OPENAI_API_KEY=your_openai_api_key_here

# Anthropic Configuration
# Get your API key at: https://console.anthropic.com/
ANTHROPIC_API_KEY=your_anthropic_api_key_here

# Ollama Configuration (for local models)
# Install Ollama locally: https://ollama.ai/
OLLAMA_BASE_URL=http://localhost:11434
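# Example override (assumption: this app runs in Docker while Ollama runs on the
# host; on Docker Desktop the host is reachable as host.docker.internal):
# OLLAMA_BASE_URL=http://host.docker.internal:11434
# Pull the model before first use, e.g.: ollama pull llama3.2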

# LLM Provider Selection (optional - auto-detects if not specified)
# Options: openai, anthropic, ollama
LLM_PROVIDER=openai

# LLM Model Selection (optional - uses provider default if not specified)
# OpenAI: gpt-4o, gpt-4o-mini, gpt-4-turbo, gpt-3.5-turbo
# Anthropic: claude-3-5-sonnet-20241022, claude-3-haiku-20240307, claude-3-opus-20240229
# Ollama: llama3.2, codellama, mistral, llama2
LLM_MODEL=gpt-4o-mini
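# Example (based on the options above): to use a local Ollama model instead of
# OpenAI, set the provider and model together:
# LLM_PROVIDER=ollama
# LLM_MODEL=llama3.2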

# Database Configuration (Docker Compose will use defaults)
# DATABASE_URL=postgresql://cve_user:cve_password@localhost:5432/cve_sigma_db
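# Example override (assumption: the Postgres service in docker-compose.yml is
# named "db"; adjust the host to match your compose service name):
# DATABASE_URL=postgresql://cve_user:cve_password@db:5432/cve_sigma_db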

# Frontend Configuration
# REACT_APP_API_URL=http://localhost:8000

# Optional: Redis Configuration
# REDIS_URL=redis://localhost:6379