- Extract database models from monolithic main.py (2,373 lines) into organized modules - Implement service layer pattern with dedicated business logic classes - Split API endpoints into modular FastAPI routers by functionality - Add centralized configuration management with environment variable handling - Create proper separation of concerns across data, service, and presentation layers **Architecture Changes:** - models/: SQLAlchemy database models (CVE, SigmaRule, RuleTemplate, BulkProcessingJob) - config/: Centralized settings and database configuration - services/: Business logic (CVEService, SigmaRuleService, GitHubExploitAnalyzer) - routers/: Modular API endpoints (cves, sigma_rules, bulk_operations, llm_operations) - schemas/: Pydantic request/response models **Key Improvements:** - 95% reduction in main.py size (2,373 → 120 lines) - Updated 15+ backend files with proper import structure - Eliminated circular dependencies and tight coupling - Enhanced testability with isolated service components - Better code organization for team collaboration **Backward Compatibility:** - All API endpoints maintain same URLs and behavior - Zero breaking changes to existing functionality - Database schema unchanged - Environment variables preserved 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
131 lines
No EOL
5 KiB
Python
131 lines
No EOL
5 KiB
Python
import re
|
|
import uuid
|
|
import requests
|
|
from datetime import datetime, timedelta
|
|
from typing import List, Optional
|
|
from sqlalchemy.orm import Session
|
|
|
|
import sys
|
|
import os
|
|
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
from models import CVE, SigmaRule, RuleTemplate
|
|
from config.settings import settings
|
|
|
|
|
|
class CVEService:
    """Service for managing CVE data and operations."""

    def __init__(self, db: Session):
        # Session is injected so callers control transaction/connection scope.
        self.db = db
        self.nvd_api_key = settings.NVD_API_KEY

    @staticmethod
    def _nvd_timestamp(dt: datetime) -> str:
        """Format a datetime as the NVD API's millisecond-precision UTC string.

        The API expects e.g. "2024-01-01T00:00:00.000Z"; strftime's %f gives
        microseconds, so the last three digits are trimmed.
        """
        return dt.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"

    @staticmethod
    def _parse_nvd_date(value: str) -> Optional[datetime]:
        """Parse an NVD ISO-8601 timestamp ending in "Z".

        Returns None for a missing or malformed value instead of raising, so
        one bad record cannot abort a whole fetch batch.
        """
        if not value:
            return None
        try:
            return datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            return None

    def _build_cve(self, cve_id: str, cve_data: dict) -> CVE:
        """Translate one NVD "cve" payload into an unsaved CVE model instance."""
        descriptions = cve_data.get("descriptions") or []
        # NOTE(review): takes the first description regardless of language —
        # confirm NVD always lists the English description first.
        description = descriptions[0].get("value", "") if descriptions else ""

        cvss_score = None
        severity = None
        metrics_v31 = cve_data.get("metrics", {}).get("cvssMetricV31")
        if metrics_v31:
            cvss_data = metrics_v31[0].get("cvssData", {})
            cvss_score = cvss_data.get("baseScore")
            severity = cvss_data.get("baseSeverity")

        # CPE criteria strings for every node flagged vulnerable.
        affected_products = [
            cpe_match.get("criteria", "")
            for config in cve_data.get("configurations") or []
            for node in config.get("nodes", [])
            for cpe_match in node.get("cpeMatch", [])
            if cpe_match.get("vulnerable")
        ]

        reference_urls = [
            ref.get("url", "") for ref in cve_data.get("references") or []
        ]

        return CVE(
            cve_id=cve_id,
            description=description,
            cvss_score=cvss_score,
            severity=severity,
            published_date=self._parse_nvd_date(cve_data.get("published", "")),
            modified_date=self._parse_nvd_date(cve_data.get("lastModified", "")),
            affected_products=affected_products,
            reference_urls=reference_urls,
        )

    async def fetch_recent_cves(self, days_back: int = 7) -> List[CVE]:
        """Fetch CVEs published in the last *days_back* days from the NVD API.

        Persists and returns only CVEs not already stored; on any network,
        parse, or database error the transaction is rolled back and an empty
        list is returned (best-effort contract preserved from the original).

        NOTE(review): declared ``async`` but uses the blocking ``requests``
        library — confirm callers do not run this on a latency-sensitive
        event loop.
        """
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days_back)

        url = settings.NVD_API_BASE_URL
        params = {
            "pubStartDate": self._nvd_timestamp(start_date),
            "pubEndDate": self._nvd_timestamp(end_date),
            "resultsPerPage": 100,
        }

        headers = {}
        if self.nvd_api_key:
            headers["apiKey"] = self.nvd_api_key

        try:
            response = requests.get(url, params=params, headers=headers, timeout=30)
            response.raise_for_status()
            data = response.json()

            new_cves = []
            for vuln in data.get("vulnerabilities", []):
                cve_data = vuln.get("cve", {})
                cve_id = cve_data.get("id")
                if not cve_id:
                    # Malformed entry without an identifier: nothing to key on.
                    continue

                # Skip CVEs that are already stored.
                existing = self.db.query(CVE).filter(CVE.cve_id == cve_id).first()
                if existing:
                    continue

                cve_obj = self._build_cve(cve_id, cve_data)
                self.db.add(cve_obj)
                new_cves.append(cve_obj)

            self.db.commit()
            return new_cves

        except Exception as e:
            # Roll back so the session stays usable after a failed flush/commit.
            self.db.rollback()
            print(f"Error fetching CVEs: {str(e)}")
            return []

    def get_cve_by_id(self, cve_id: str) -> Optional[CVE]:
        """Return the CVE with the given identifier, or None if not stored."""
        return self.db.query(CVE).filter(CVE.cve_id == cve_id).first()

    def get_all_cves(self, limit: int = 100, offset: int = 0) -> List[CVE]:
        """Return CVEs with simple offset/limit pagination."""
        return self.db.query(CVE).offset(offset).limit(limit).all()

    def get_cve_stats(self) -> dict:
        """Return counts: total, high (CVSS >= 7.0), critical (CVSS >= 9.0).

        Note the buckets overlap: critical CVEs are also counted as high.
        """
        total_cves = self.db.query(CVE).count()
        high_severity = self.db.query(CVE).filter(CVE.cvss_score >= 7.0).count()
        critical_severity = self.db.query(CVE).filter(CVE.cvss_score >= 9.0).count()

        return {
            "total_cves": total_cves,
            "high_severity": high_severity,
            "critical_severity": critical_severity,
        }

    def update_cve_poc_data(self, cve_id: str, poc_data: dict) -> bool:
        """Attach proof-of-concept data to a stored CVE.

        Returns True when the CVE exists and was updated, False when it is
        missing or the update fails (errors are logged, not raised).
        """
        try:
            cve = self.get_cve_by_id(cve_id)
            if cve is None:
                return False
            cve.poc_data = poc_data
            cve.poc_count = len(poc_data.get('pocs', []))
            cve.updated_at = datetime.utcnow()
            self.db.commit()
            return True
        except Exception as e:
            # Roll back so the session stays usable after a failed commit.
            self.db.rollback()
            print(f"Error updating CVE PoC data: {str(e)}")
            return False