Files
breakpilot-compliance/backend-compliance/compliance/api/evidence_check_routes.py
Benjamin Admin 49ce417428
All checks were successful
CI/CD / go-lint (push) Has been skipped
CI/CD / python-lint (push) Has been skipped
CI/CD / nodejs-lint (push) Has been skipped
CI/CD / test-go-ai-compliance (push) Successful in 32s
CI/CD / test-python-backend-compliance (push) Successful in 34s
CI/CD / test-python-document-crawler (push) Successful in 23s
CI/CD / test-python-dsms-gateway (push) Successful in 21s
CI/CD / validate-canonical-controls (push) Successful in 11s
CI/CD / Deploy (push) Successful in 2s
feat: add compliance modules 2-5 (dashboard, security templates, process manager, evidence collector)
Module 2: Extended Compliance Dashboard with roadmap, module-status, next-actions, snapshots, score-history
Module 3: 7 German security document templates (IT-Sicherheitskonzept, Datenschutz, Backup, Logging, Incident-Response, Zugriff, Risikomanagement)
Module 4: Compliance Process Manager with CRUD, complete/skip/seed, ~50 seed tasks, 3-tab UI
Module 5: Evidence Collector Extended with automated checks, control-mapping, coverage report, 4-tab UI

Also includes: canonical control library enhancements (verification method, categories, dedup), control generator improvements, RAG client extensions

52 tests pass, frontend builds clean.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-14 21:03:04 +01:00

1152 lines
40 KiB
Python

"""
FastAPI routes for Evidence Checks — automated compliance verification.
Endpoints:
GET /evidence-checks — list checks (filter: check_type, is_active, frequency)
POST /evidence-checks — create check definition
GET /evidence-checks/{id} — single check with last 5 results
PUT /evidence-checks/{id} — update check
DELETE /evidence-checks/{id} — delete check (204)
POST /evidence-checks/{id}/run — execute check now
GET /evidence-checks/{id}/results — result history
POST /evidence-checks/run-due — run all due checks
POST /evidence-checks/seed — seed standard check definitions
GET /evidence-checks/mappings — list evidence-control mappings
POST /evidence-checks/mappings — create mapping
DELETE /evidence-checks/mappings/{id} — delete mapping (204)
GET /evidence-checks/mappings/by-control/{code} — evidence for a control
GET /evidence-checks/mappings/report — coverage report
"""
import json
import logging
import ssl
import socket
import time
from datetime import datetime, timedelta
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from sqlalchemy import text
from sqlalchemy.orm import Session
from classroom_engine.database import get_db
from .tenant_utils import get_tenant_id as _get_tenant_id
from .db_utils import row_to_dict as _row_to_dict
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/evidence-checks", tags=["evidence-checks"])
# =============================================================================
# Validation sets
# =============================================================================
# Check types that have a corresponding runner registered in CHECK_RUNNERS.
VALID_CHECK_TYPES = {
    "tls_scan", "header_check", "certificate_check",
    "config_scan", "api_scan", "dns_check", "port_scan",
}
# Allowed scheduling frequencies for a check definition.
VALID_FREQUENCIES = {"daily", "weekly", "monthly", "quarterly", "manual"}
# Interval added to "now" to compute next_run_at after a run.
# "manual" maps to None: manual checks are never auto-rescheduled.
FREQUENCY_DELTAS = {
    "daily": timedelta(days=1),
    "weekly": timedelta(weeks=1),
    "monthly": timedelta(days=30),    # month approximated as 30 days
    "quarterly": timedelta(days=90),  # quarter approximated as 90 days
    "manual": None,
}
# Columns stored as JSONB; their values are json.dumps()-serialized before
# being bound into SQL (see create_check / update_check).
JSONB_FIELDS = {"target_config", "linked_control_ids"}
# =============================================================================
# Pydantic Schemas
# =============================================================================
class EvidenceCheckCreate(BaseModel):
    """Request body for creating an evidence check definition.

    check_type and frequency are validated in the route against
    VALID_CHECK_TYPES / VALID_FREQUENCIES. Pydantic deep-copies the mutable
    defaults ({} / []) per instance, so sharing is not an issue here.
    """
    check_code: str  # tenant-unique code, e.g. "TLS-SCAN-001"
    title: str
    description: Optional[str] = None
    check_type: str
    target_url: Optional[str] = None
    target_config: Optional[dict] = {}       # runner-specific options (stored as JSONB)
    linked_control_ids: Optional[List] = []  # control references (stored as JSONB)
    frequency: str = "monthly"
    is_active: bool = True
class EvidenceCheckUpdate(BaseModel):
    """Partial-update body for PUT /evidence-checks/{id}.

    Every field is optional; the route uses model_dump(exclude_unset=True),
    so only fields the client explicitly sent are written to the row.
    """
    check_code: Optional[str] = None
    title: Optional[str] = None
    description: Optional[str] = None
    check_type: Optional[str] = None   # validated against VALID_CHECK_TYPES if set
    target_url: Optional[str] = None
    target_config: Optional[dict] = None
    linked_control_ids: Optional[List] = None
    frequency: Optional[str] = None    # validated against VALID_FREQUENCIES if set
    is_active: Optional[bool] = None
class EvidenceControlMapCreate(BaseModel):
    """Request body linking an evidence item to a control.

    mapping_type is validated in the route against
    {"supports", "partially_supports", "required"}.
    """
    evidence_id: str  # UUID string; cast to uuid in SQL
    control_code: str
    mapping_type: str = "supports"
    notes: Optional[str] = None
# =============================================================================
# Check execution helpers
# =============================================================================
async def _run_tls_scan(target_url: str, config: dict) -> dict:
    """Check TLS version, cipher suite and certificate validity of *target_url*.

    Returns the standard runner result dict (run_status, result_data, summary,
    findings_count, critical_findings, duration_ms). Never raises: any
    exception is converted into a run_status="error" result.
    """
    try:
        from urllib.parse import urlparse
        parsed = urlparse(target_url)
        hostname = parsed.hostname or target_url  # tolerate bare hostnames
        port = parsed.port or 443
        context = ssl.create_default_context()
        start = time.time()
        with socket.create_connection((hostname, port), timeout=10) as sock:
            with context.wrap_socket(sock, server_hostname=hostname) as ssock:
                cert = ssock.getpeercert()
                cipher = ssock.cipher()
                version = ssock.version()
        duration = int((time.time() - start) * 1000)
        # Certificate expiry; notAfter looks like "Jun  1 12:00:00 2026 GMT"
        # and is parsed as naive UTC, compared against naive utcnow().
        not_after = datetime.strptime(cert['notAfter'], '%b %d %H:%M:%S %Y %Z')
        days_until_expiry = (not_after - datetime.utcnow()).days
        findings = []
        critical = 0
        if version in ('TLSv1', 'TLSv1.1'):
            findings.append({"severity": "critical", "finding": f"Veraltete TLS-Version: {version}"})
            critical += 1
        # Fix: an already-expired certificate previously produced BOTH the
        # critical "abgelaufen" finding AND a nonsensical warning with a
        # negative day count. Check the expired case first and only warn
        # about upcoming expiry when the certificate is still valid.
        if days_until_expiry < 0:
            findings.append({"severity": "critical", "finding": "Zertifikat abgelaufen"})
            critical += 1
        elif days_until_expiry < 30:
            findings.append({"severity": "warning", "finding": f"Zertifikat laeuft in {days_until_expiry} Tagen ab"})
        status = "passed" if not findings else ("failed" if critical > 0 else "warning")
        return {
            "run_status": status,
            "result_data": {
                "tls_version": version,
                "cipher": cipher[0] if cipher else None,  # cipher() -> (name, protocol, bits)
                "cert_expiry": not_after.isoformat(),
                "days_until_expiry": days_until_expiry,
                "findings": findings,
            },
            "summary": f"TLS {version}, Zertifikat gueltig bis {not_after.strftime('%d.%m.%Y')}",
            "findings_count": len(findings),
            "critical_findings": critical,
            "duration_ms": duration,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_header_check(target_url: str, config: dict) -> dict:
    """Verify that *target_url* responds with the expected security headers.

    A missing Strict-Transport-Security header is treated as critical, every
    other missing header as a warning. Never raises; exceptions become a
    run_status="error" result.
    """
    try:
        import httpx
        # Lowercased header name -> display label used in findings.
        expected = {
            "strict-transport-security": "HSTS",
            "x-frame-options": "X-Frame-Options",
            "x-content-type-options": "X-Content-Type-Options",
            "content-security-policy": "Content-Security-Policy",
            "referrer-policy": "Referrer-Policy",
            "permissions-policy": "Permissions-Policy",
        }
        started = time.time()
        # verify=False: TLS validity is covered by the dedicated TLS/cert checks.
        async with httpx.AsyncClient(verify=False, timeout=10) as client:
            response = await client.get(target_url)
        elapsed_ms = int((time.time() - started) * 1000)
        findings = []
        critical_count = 0
        present, missing = [], []
        for name, label in expected.items():
            if response.headers.get(name):
                present.append(label)
                continue
            missing.append(label)
            severity = "critical" if name == "strict-transport-security" else "warning"
            findings.append({"severity": severity, "finding": f"Fehlender Header: {label}"})
            if severity == "critical":
                critical_count += 1
        if not findings:
            status = "passed"
        elif critical_count:
            status = "failed"
        else:
            status = "warning"
        return {
            "run_status": status,
            "result_data": {
                "status_code": response.status_code,
                "present_headers": present,
                "missing_headers": missing,
                "findings": findings,
            },
            "summary": f"{len(present)}/{len(expected)} Security-Header vorhanden",
            "findings_count": len(findings),
            "critical_findings": critical_count,
            "duration_ms": elapsed_ms,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_certificate_check(target_url: str, config: dict) -> dict:
    """Inspect the server certificate: subject/issuer, validity window, SAN.

    Returns the standard runner result dict; never raises — any exception is
    converted into a run_status="error" result.
    """
    try:
        from urllib.parse import urlparse
        parsed = urlparse(target_url)
        hostname = parsed.hostname or target_url
        port = parsed.port or 443
        context = ssl.create_default_context()
        start = time.time()
        with socket.create_connection((hostname, port), timeout=10) as sock:
            with context.wrap_socket(sock, server_hostname=hostname) as ssock:
                cert = ssock.getpeercert()
                der_cert = ssock.getpeercert(binary_form=True)
        duration = int((time.time() - start) * 1000)
        # getpeercert() returns subject/issuer as tuples of 1-element RDN tuples.
        subject = dict(x[0] for x in cert.get("subject", ()))
        issuer = dict(x[0] for x in cert.get("issuer", ()))
        san_entries = [entry[1] for entry in cert.get("subjectAltName", ())]
        not_after = datetime.strptime(cert['notAfter'], '%b %d %H:%M:%S %Y %Z')
        not_before = datetime.strptime(cert['notBefore'], '%b %d %H:%M:%S %Y %Z')
        days_until_expiry = (not_after - datetime.utcnow()).days
        findings = []
        critical = 0
        if days_until_expiry < 0:
            findings.append({"severity": "critical", "finding": "Zertifikat abgelaufen"})
            critical += 1
        elif days_until_expiry < 14:
            findings.append({"severity": "critical", "finding": f"Zertifikat laeuft in {days_until_expiry} Tagen ab"})
            critical += 1
        elif days_until_expiry < 30:
            findings.append({"severity": "warning", "finding": f"Zertifikat laeuft in {days_until_expiry} Tagen ab"})
        # Exact or single-level wildcard SAN match only; multi-label wildcards
        # are not considered (close enough to ssl's own matching rules).
        if hostname not in san_entries and f"*.{'.'.join(hostname.split('.')[1:])}" not in san_entries:
            findings.append({"severity": "warning", "finding": f"Hostname {hostname} nicht in SAN enthalten"})
        # Key-size sanity check. Python's ssl module does not expose the public
        # key length, so the DER certificate size serves as a coarse proxy.
        # Fix: the previous formula (len(der)*8//10 < 2048) evaluated to
        # ~800-1600 for typical 1-2 KiB certificates and therefore flagged
        # virtually every real-world certificate. Now only implausibly small
        # certificates (< 512 bytes DER) trigger the warning.
        if der_cert is not None and len(der_cert) < 512:
            findings.append({"severity": "warning", "finding": "Schluessellaenge moeglicherweise zu kurz"})
        status = "passed" if not findings else ("failed" if critical > 0 else "warning")
        return {
            "run_status": status,
            "result_data": {
                "subject": subject,
                "issuer": issuer,
                "san": san_entries,
                "not_before": not_before.isoformat(),
                "not_after": not_after.isoformat(),
                "days_until_expiry": days_until_expiry,
                "serial_number": cert.get("serialNumber"),
                "findings": findings,
            },
            "summary": f"Zertifikat von {issuer.get('organizationName', 'Unknown')}, gueltig bis {not_after.strftime('%d.%m.%Y')}",
            "findings_count": len(findings),
            "critical_findings": critical,
            "duration_ms": duration,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_dns_check(target_url: str, config: dict) -> dict:
    """Resolve the target hostname and report its addresses.

    Adds an informational finding when no IPv6 record is found. Never raises;
    resolution failures become a run_status="error" result.
    """
    try:
        from urllib.parse import urlparse
        host = urlparse(target_url).hostname or target_url
        started = time.time()
        infos = socket.getaddrinfo(host, None)
        elapsed_ms = int((time.time() - started) * 1000)
        # Deduplicate addresses and address families across all results.
        addresses = list({info[4][0] for info in infos})
        families = list({info[0].name for info in infos})
        findings = []
        if not any("AF_INET6" in family for family in families):
            findings.append({"severity": "info", "finding": "Kein IPv6 (AAAA) Record gefunden"})
        return {
            "run_status": "passed" if addresses else "failed",
            "result_data": {
                "hostname": host,
                "addresses": addresses,
                "address_families": families,
                "record_count": len(addresses),
                "findings": findings,
            },
            "summary": f"DNS aufgeloest: {len(addresses)} Adresse(n) fuer {host}",
            "findings_count": len(findings),
            "critical_findings": 0,
            "duration_ms": elapsed_ms,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"DNS-Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_api_scan(target_url: str, config: dict) -> dict:
    """Probe a health endpoint and require an HTTP 200 response."""
    try:
        import httpx
        started = time.time()
        # verify=False: TLS validity is covered by the dedicated TLS/cert checks.
        async with httpx.AsyncClient(verify=False, timeout=10) as client:
            response = await client.get(target_url)
        elapsed_ms = int((time.time() - started) * 1000)
        healthy = response.status_code == 200
        findings = []
        critical_count = 0
        if not healthy:
            findings.append({"severity": "critical", "finding": f"HTTP {response.status_code} statt 200"})
            critical_count += 1
        # Prefer the parsed JSON body; fall back to a truncated text snippet.
        try:
            payload = response.json()
        except Exception:
            payload = response.text[:500]
        return {
            "run_status": "passed" if healthy else "failed",
            "result_data": {
                "status_code": response.status_code,
                "response_body": payload,
                "response_time_ms": elapsed_ms,
                "findings": findings,
            },
            "summary": f"HTTP {response.status_code}, Antwortzeit {elapsed_ms}ms",
            "findings_count": len(findings),
            "critical_findings": critical_count,
            "duration_ms": elapsed_ms,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_config_scan(target_url: str, config: dict) -> dict:
    """Fetch a JSON config endpoint and verify it contains the expected keys.

    config["expected_keys"]: list of top-level keys that must be present in
    the JSON response; each missing key yields a warning finding.
    Returns the standard runner result dict; never raises.
    """
    try:
        import httpx
        expected_keys = config.get("expected_keys", [])
        start = time.time()
        # verify=False: TLS validity is covered by the dedicated TLS/cert checks.
        async with httpx.AsyncClient(verify=False, timeout=10) as client:
            resp = await client.get(target_url)
        duration = int((time.time() - start) * 1000)
        findings = []
        critical = 0
        # Early exit: endpoint unreachable -> failed, no key checks performed.
        if resp.status_code != 200:
            findings.append({"severity": "critical", "finding": f"Endpunkt nicht erreichbar: HTTP {resp.status_code}"})
            critical += 1
            return {
                "run_status": "failed",
                "result_data": {"status_code": resp.status_code, "findings": findings},
                "summary": f"Endpunkt nicht erreichbar: HTTP {resp.status_code}",
                "findings_count": len(findings),
                "critical_findings": critical,
                "duration_ms": duration,
            }
        # Early exit: body is not valid JSON -> failed.
        try:
            body = resp.json()
        except Exception:
            findings.append({"severity": "critical", "finding": "Antwort ist kein gueltiges JSON"})
            return {
                "run_status": "failed",
                "result_data": {"findings": findings},
                "summary": "Ungueltige JSON-Antwort",
                "findings_count": 1,
                "critical_findings": 1,
                "duration_ms": duration,
            }
        present_keys = []
        missing_keys = []
        # Key checks only apply when keys were configured AND the body is an object.
        if expected_keys and isinstance(body, dict):
            for key in expected_keys:
                if key in body:
                    present_keys.append(key)
                else:
                    missing_keys.append(key)
                    findings.append({"severity": "warning", "finding": f"Fehlender Config-Key: {key}"})
        # critical is always 0 here (critical paths returned above), so a
        # non-empty findings list can only produce "warning".
        status = "passed" if not findings else ("failed" if critical > 0 else "warning")
        return {
            "run_status": status,
            "result_data": {
                "status_code": resp.status_code,
                "present_keys": present_keys,
                "missing_keys": missing_keys,
                "findings": findings,
            },
            "summary": f"{len(present_keys)}/{len(expected_keys)} erwartete Keys vorhanden" if expected_keys else "Config erreichbar",
            "findings_count": len(findings),
            "critical_findings": critical,
            "duration_ms": duration,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
async def _run_port_scan(target_url: str, config: dict) -> dict:
    """TCP-connect scan over a small port list; records open/closed only.

    config["ports"] overrides the default port list; config["expected_open"]
    (default [80, 443]) defines which open ports are NOT flagged.
    """
    try:
        from urllib.parse import urlparse
        host = urlparse(target_url).hostname or target_url
        ports_to_check = config.get("ports", [80, 443, 22, 3306, 5432, 6379, 8080])
        started = time.time()
        port_results = []
        open_ports = []
        closed_ports = []
        for candidate in ports_to_check:
            try:
                with socket.create_connection((host, candidate), timeout=3):
                    pass
            except (socket.timeout, ConnectionRefusedError, OSError):
                port_results.append({"port": candidate, "status": "closed"})
                closed_ports.append(candidate)
            else:
                port_results.append({"port": candidate, "status": "open"})
                open_ports.append(candidate)
        elapsed_ms = int((time.time() - started) * 1000)
        # Only open ports outside the expected set are flagged (warnings only).
        allowed = config.get("expected_open", [80, 443])
        findings = [
            {"severity": "warning", "finding": f"Unerwarteter offener Port: {p}"}
            for p in open_ports
            if p not in allowed
        ]
        return {
            "run_status": "passed" if not findings else "warning",
            "result_data": {
                "hostname": host,
                "ports": port_results,
                "open_ports": open_ports,
                "closed_ports": closed_ports,
                "findings": findings,
            },
            "summary": f"{len(open_ports)} offene, {len(closed_ports)} geschlossene Ports",
            "findings_count": len(findings),
            "critical_findings": 0,
            "duration_ms": elapsed_ms,
        }
    except Exception as e:
        return {
            "run_status": "error",
            "result_data": {"error": str(e)},
            "summary": f"Fehler: {str(e)}",
            "findings_count": 1,
            "critical_findings": 1,
            "duration_ms": 0,
        }
# Dispatcher: check_type -> async runner coroutine.
# Every key here must also be listed in VALID_CHECK_TYPES; run_check and
# run_due_checks look runners up via CHECK_RUNNERS.get(check_type).
CHECK_RUNNERS = {
    "tls_scan": _run_tls_scan,
    "header_check": _run_header_check,
    "certificate_check": _run_certificate_check,
    "dns_check": _run_dns_check,
    "api_scan": _run_api_scan,
    "config_scan": _run_config_scan,
    "port_scan": _run_port_scan,
}
# =============================================================================
# Routes — Check Definitions
# =============================================================================
@router.get("")
async def list_checks(
    check_type: Optional[str] = Query(None),
    is_active: Optional[bool] = Query(None),
    frequency: Optional[str] = Query(None),
    limit: int = Query(100, ge=1, le=500),
    offset: int = Query(0, ge=0),
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """List the tenant's evidence checks, optionally filtered and paginated.

    Returns {"checks": [...], "total": <row count before pagination>}.
    """
    # WHERE is assembled only from fixed, parameterized fragments — no user
    # input ever reaches the SQL text itself.
    filters = ["tenant_id = :tenant_id"]
    params: Dict[str, Any] = {"tenant_id": tenant_id, "limit": limit, "offset": offset}
    if check_type:
        filters.append("check_type = :check_type")
        params["check_type"] = check_type
    if is_active is not None:  # False is a valid filter value
        filters.append("is_active = :is_active")
        params["is_active"] = is_active
    if frequency:
        filters.append("frequency = :frequency")
        params["frequency"] = frequency
    where_sql = " AND ".join(filters)
    count_row = db.execute(
        text(f"SELECT COUNT(*) FROM compliance_evidence_checks WHERE {where_sql}"),
        params,
    ).fetchone()
    total = count_row[0] if count_row else 0
    records = db.execute(
        text(f"""
            SELECT * FROM compliance_evidence_checks
            WHERE {where_sql}
            ORDER BY created_at DESC
            LIMIT :limit OFFSET :offset
        """),
        params,
    ).fetchall()
    return {"checks": [_row_to_dict(record) for record in records], "total": total}
@router.post("", status_code=201)
async def create_check(
    body: EvidenceCheckCreate,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Create a new evidence check definition and return the created row.

    check_type and frequency are validated against the module-level
    whitelists; JSONB columns are serialized before binding.
    """
    if body.check_type not in VALID_CHECK_TYPES:
        raise HTTPException(400, f"Ungueltiger check_type: {body.check_type}. Erlaubt: {sorted(VALID_CHECK_TYPES)}")
    if body.frequency not in VALID_FREQUENCIES:
        raise HTTPException(400, f"Ungueltige frequency: {body.frequency}. Erlaubt: {sorted(VALID_FREQUENCIES)}")
    insert_params = {
        "tenant_id": tenant_id,
        "check_code": body.check_code,
        "title": body.title,
        "description": body.description,
        "check_type": body.check_type,
        "target_url": body.target_url,
        "target_config": json.dumps(body.target_config or {}),
        "linked_control_ids": json.dumps(body.linked_control_ids or []),
        "frequency": body.frequency,
        "is_active": body.is_active,
    }
    created = db.execute(
        text("""
            INSERT INTO compliance_evidence_checks
                (tenant_id, check_code, title, description, check_type,
                 target_url, target_config, linked_control_ids, frequency, is_active)
            VALUES
                (:tenant_id, :check_code, :title, :description, :check_type,
                 :target_url, CAST(:target_config AS jsonb), CAST(:linked_control_ids AS jsonb),
                 :frequency, :is_active)
            RETURNING *
        """),
        insert_params,
    ).fetchone()
    db.commit()
    return _row_to_dict(created)
@router.get("/mappings")
async def list_mappings(
    control_code: Optional[str] = Query(None),
    evidence_id: Optional[str] = Query(None),
    limit: int = Query(100, ge=1, le=500),
    offset: int = Query(0, ge=0),
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """List evidence-control mappings, optionally filtered by control or evidence."""
    # Parameterized WHERE fragments only — no user input in the SQL text.
    filters = ["tenant_id = :tenant_id"]
    params: Dict[str, Any] = {"tenant_id": tenant_id, "limit": limit, "offset": offset}
    if control_code:
        filters.append("control_code = :control_code")
        params["control_code"] = control_code
    if evidence_id:
        filters.append("evidence_id = CAST(:evidence_id AS uuid)")
        params["evidence_id"] = evidence_id
    where_sql = " AND ".join(filters)
    records = db.execute(
        text(f"""
            SELECT * FROM compliance_evidence_control_map
            WHERE {where_sql}
            ORDER BY created_at DESC
            LIMIT :limit OFFSET :offset
        """),
        params,
    ).fetchall()
    return {"mappings": [_row_to_dict(record) for record in records]}
@router.post("/mappings", status_code=201)
async def create_mapping(
    body: EvidenceControlMapCreate,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Create an evidence-control mapping; 409 if the pair already exists."""
    allowed_types = {"supports", "partially_supports", "required"}
    if body.mapping_type not in allowed_types:
        raise HTTPException(400, f"Ungueltiger mapping_type: {body.mapping_type}")
    inserted = db.execute(
        text("""
            INSERT INTO compliance_evidence_control_map
                (tenant_id, evidence_id, control_code, mapping_type, notes)
            VALUES
                (:tenant_id, CAST(:evidence_id AS uuid), :control_code, :mapping_type, :notes)
            ON CONFLICT (tenant_id, evidence_id, control_code) DO NOTHING
            RETURNING *
        """),
        {
            "tenant_id": tenant_id,
            "evidence_id": body.evidence_id,
            "control_code": body.control_code,
            "mapping_type": body.mapping_type,
            "notes": body.notes,
        },
    ).fetchone()
    db.commit()
    # ON CONFLICT DO NOTHING returns no row when the mapping already existed.
    if inserted is None:
        raise HTTPException(409, "Mapping existiert bereits")
    return _row_to_dict(inserted)
@router.delete("/mappings/{mapping_id}", status_code=204)
async def delete_mapping(
    mapping_id: str,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Delete an evidence-control mapping; 404 if it does not exist for this tenant."""
    outcome = db.execute(
        text("""
            DELETE FROM compliance_evidence_control_map
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
        """),
        {"id": mapping_id, "tenant_id": tenant_id},
    )
    db.commit()
    if not outcome.rowcount:
        raise HTTPException(404, "Mapping nicht gefunden")
    return None
@router.get("/mappings/by-control/{code}")
async def mappings_by_control(
    code: str,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Return every evidence mapping attached to one control code."""
    records = db.execute(
        text("""
            SELECT * FROM compliance_evidence_control_map
            WHERE tenant_id = :tenant_id AND control_code = :code
            ORDER BY created_at DESC
        """),
        {"tenant_id": tenant_id, "code": code},
    ).fetchall()
    return {"control_code": code, "mappings": [_row_to_dict(record) for record in records]}
@router.get("/mappings/report")
async def mappings_report(
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Coverage report: which canonical controls have at least one evidence mapping."""
    canonical_rows = db.execute(
        text("""
            SELECT DISTINCT control_code FROM compliance_canonical_controls
            WHERE tenant_id = :tenant_id
        """),
        {"tenant_id": tenant_id},
    ).fetchall()
    all_codes = {r[0] for r in canonical_rows}
    total_controls = len(all_codes)  # DISTINCT -> row count == code count
    mapped_rows = db.execute(
        text("""
            SELECT DISTINCT control_code FROM compliance_evidence_control_map
            WHERE tenant_id = :tenant_id
        """),
        {"tenant_id": tenant_id},
    ).fetchall()
    covered_codes = {r[0] for r in mapped_rows}
    # Intersect: mappings may reference codes that are not canonical controls.
    with_evidence = len(covered_codes & all_codes)
    without_evidence = total_controls - with_evidence
    coverage_pct = round(with_evidence / total_controls * 100, 1) if total_controls > 0 else 0.0
    return {
        "total_controls": total_controls,
        "controls_with_evidence": with_evidence,
        "controls_without_evidence": without_evidence,
        "coverage_percentage": coverage_pct,
        "uncovered_controls": sorted(all_codes - covered_codes),
    }
@router.post("/seed")
async def seed_checks(
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Seed standard check definitions for the tenant.

    Idempotent: existing check codes are skipped via ON CONFLICT DO NOTHING.
    Returns {"seeded": <newly inserted>, "total_definitions": <seed count>}.
    """
    # (check_code, title, check_type, frequency, description)
    seeds = [
        ("TLS-SCAN-001", "TLS-Scan Hauptwebseite", "tls_scan", "monthly",
         "Prueft TLS-Version, Cipher-Suite und Zertifikatsgueltigkeit der Hauptwebseite"),
        ("TLS-SCAN-002", "TLS-Scan API-Server", "tls_scan", "monthly",
         "Prueft TLS-Version, Cipher-Suite und Zertifikatsgueltigkeit des API-Servers"),
        ("HEADER-001", "Security-Header-Check Webseite", "header_check", "monthly",
         "Prueft Sicherheitsheader (HSTS, CSP, X-Frame-Options) der Hauptwebseite"),
        ("HEADER-002", "Security-Header-Check API", "header_check", "monthly",
         "Prueft Sicherheitsheader des API-Servers"),
        ("CERT-001", "Zertifikat-Check Hauptdomain", "certificate_check", "weekly",
         "Prueft Zertifikatskette, Schluessellaenge und SAN der Hauptdomain"),
        ("CERT-002", "Zertifikat-Check API-Domain", "certificate_check", "weekly",
         "Prueft Zertifikatskette, Schluessellaenge und SAN der API-Domain"),
        ("DNS-001", "DNS-Check Hauptdomain", "dns_check", "monthly",
         "DNS-Aufloesung und Record-Pruefung der Hauptdomain"),
        ("API-HEALTH-001", "API Health-Check Backend", "api_scan", "daily",
         "Prueft ob der Backend-Health-Endpoint HTTP 200 liefert"),
        ("API-HEALTH-002", "API Health-Check SDK", "api_scan", "daily",
         "Prueft ob der SDK-Health-Endpoint HTTP 200 liefert"),
        ("API-HEALTH-003", "API Health-Check Frontend", "api_scan", "daily",
         "Prueft ob der Frontend-Health-Endpoint HTTP 200 liefert"),
        ("PORT-SCAN-001", "Port-Scan Webserver", "port_scan", "quarterly",
         "Prueft offene/geschlossene Ports des Webservers"),
        ("PORT-SCAN-002", "Port-Scan Datenbankserver", "port_scan", "quarterly",
         "Prueft offene/geschlossene Ports des Datenbankservers"),
        ("CONFIG-001", "Konfiguration-Check Backend", "config_scan", "monthly",
         "Prueft ob Backend-Konfiguration erwartete Keys enthaelt"),
        ("CONFIG-002", "Konfiguration-Check SDK", "config_scan", "monthly",
         "Prueft ob SDK-Konfiguration erwartete Keys enthaelt"),
        ("HEADER-003", "CORS-Header-Check API", "header_check", "quarterly",
         "Prueft CORS-Header-Konfiguration des API-Servers"),
    ]
    created = 0
    for code, title, ctype, freq, desc in seeds:
        # NOTE(review): the conflict target includes project_id, which is not
        # in the INSERT column list — presumably the column has a DB default;
        # confirm against the table schema.
        result = db.execute(
            text("""
                INSERT INTO compliance_evidence_checks
                    (tenant_id, check_code, title, description, check_type, frequency)
                VALUES
                    (:tenant_id, :code, :title, :desc, :ctype, :freq)
                ON CONFLICT (tenant_id, project_id, check_code) DO NOTHING
            """),
            {
                "tenant_id": tenant_id,
                "code": code,
                "title": title,
                "desc": desc,
                "ctype": ctype,
                "freq": freq,
            },
        )
        # rowcount is 1 for a fresh insert, 0 when the conflict clause fired.
        created += result.rowcount
    db.commit()
    return {"seeded": created, "total_definitions": len(seeds)}
@router.post("/run-due")
async def run_due_checks(
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Run all checks where next_run_at <= NOW() and is_active=true.

    For each due check: insert a 'running' result row, execute the runner,
    persist the outcome and reschedule next_run_at from the check's
    frequency. Returns a tally {"total", "passed", "failed", "warning",
    "error"}; checks without a registered runner are skipped and do not
    appear in the per-status tallies.
    """
    rows = db.execute(
        text("""
            SELECT * FROM compliance_evidence_checks
            WHERE tenant_id = :tenant_id
              AND is_active = true
              AND (next_run_at IS NULL OR next_run_at <= NOW())
            ORDER BY created_at
        """),
        {"tenant_id": tenant_id},
    ).fetchall()
    results_summary = {"total": len(rows), "passed": 0, "failed": 0, "warning": 0, "error": 0}
    for row in rows:
        check = _row_to_dict(row)
        check_id = check["id"]
        check_type = check["check_type"]
        target_url = check.get("target_url") or ""
        target_config = check.get("target_config") or {}
        runner = CHECK_RUNNERS.get(check_type)
        if not runner:
            # No runner registered for this type: skip silently.
            continue
        # Insert a placeholder result in state 'running' before executing,
        # committed immediately so the state is visible during the run.
        result_row = db.execute(
            text("""
                INSERT INTO compliance_evidence_check_results
                    (check_id, tenant_id, run_status)
                VALUES
                    (CAST(:check_id AS uuid), :tenant_id, 'running')
                RETURNING id
            """),
            {"check_id": check_id, "tenant_id": tenant_id},
        ).fetchone()
        db.commit()
        result_id = result_row[0] if result_row else None
        # Execute the runner (runners never raise; they return error results).
        run_result = await runner(target_url, target_config)
        # Persist the outcome onto the placeholder row.
        if result_id:
            db.execute(
                text("""
                    UPDATE compliance_evidence_check_results
                    SET run_status = :run_status,
                        result_data = CAST(:result_data AS jsonb),
                        summary = :summary,
                        findings_count = :findings_count,
                        critical_findings = :critical_findings,
                        duration_ms = :duration_ms
                    WHERE id = CAST(:id AS uuid)
                """),
                {
                    "id": str(result_id),
                    "run_status": run_result["run_status"],
                    "result_data": json.dumps(run_result["result_data"]),
                    "summary": run_result["summary"],
                    "findings_count": run_result["findings_count"],
                    "critical_findings": run_result["critical_findings"],
                    "duration_ms": run_result["duration_ms"],
                },
            )
        # Reschedule: manual frequency (delta None) sets next_run_at to NULL.
        delta = FREQUENCY_DELTAS.get(check.get("frequency", "monthly"))
        next_run = (datetime.utcnow() + delta).isoformat() if delta else None
        db.execute(
            text("""
                UPDATE compliance_evidence_checks
                SET last_run_at = NOW(),
                    next_run_at = CAST(:next_run AS timestamptz),
                    updated_at = NOW()
                WHERE id = CAST(:id AS uuid)
            """),
            {"id": check_id, "next_run": next_run},
        )
        # Commit per check so earlier results survive a later failure.
        db.commit()
        results_summary[run_result["run_status"]] = results_summary.get(run_result["run_status"], 0) + 1
    return results_summary
@router.get("/{check_id}")
async def get_check(
    check_id: str,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Return a single check plus its five most recent results."""
    found = db.execute(
        text("""
            SELECT * FROM compliance_evidence_checks
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
        """),
        {"id": check_id, "tenant_id": tenant_id},
    ).fetchone()
    if found is None:
        raise HTTPException(404, "Check nicht gefunden")
    payload = _row_to_dict(found)
    history = db.execute(
        text("""
            SELECT * FROM compliance_evidence_check_results
            WHERE check_id = CAST(:check_id AS uuid)
            ORDER BY run_at DESC
            LIMIT 5
        """),
        {"check_id": check_id},
    ).fetchall()
    payload["recent_results"] = [_row_to_dict(record) for record in history]
    return payload
@router.put("/{check_id}")
async def update_check(
    check_id: str,
    body: EvidenceCheckUpdate,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Partially update a check definition; only explicitly sent fields change."""
    if body.check_type and body.check_type not in VALID_CHECK_TYPES:
        raise HTTPException(400, f"Ungueltiger check_type: {body.check_type}")
    if body.frequency and body.frequency not in VALID_FREQUENCIES:
        raise HTTPException(400, f"Ungueltige frequency: {body.frequency}")
    updates = body.model_dump(exclude_unset=True)
    if not updates:
        raise HTTPException(400, "Keine Felder zum Aktualisieren")
    # Field names come from the Pydantic model, never from raw client input,
    # so interpolating them into the SET clause is safe.
    assignments = []
    params: Dict[str, Any] = {"id": check_id, "tenant_id": tenant_id}
    for field, value in updates.items():
        if field in JSONB_FIELDS:
            assignments.append(f"{field} = CAST(:{field} AS jsonb)")
            params[field] = json.dumps(value)
        else:
            assignments.append(f"{field} = :{field}")
            params[field] = value
    assignments.append("updated_at = NOW()")
    set_sql = ", ".join(assignments)
    updated = db.execute(
        text(f"""
            UPDATE compliance_evidence_checks
            SET {set_sql}
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
            RETURNING *
        """),
        params,
    ).fetchone()
    db.commit()
    if updated is None:
        raise HTTPException(404, "Check nicht gefunden")
    return _row_to_dict(updated)
@router.delete("/{check_id}", status_code=204)
async def delete_check(
    check_id: str,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Delete a check definition (results cascade); 404 if unknown for this tenant."""
    outcome = db.execute(
        text("""
            DELETE FROM compliance_evidence_checks
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
        """),
        {"id": check_id, "tenant_id": tenant_id},
    )
    db.commit()
    if not outcome.rowcount:
        raise HTTPException(404, "Check nicht gefunden")
    return None
@router.post("/{check_id}/run")
async def run_check(
    check_id: str,
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Execute a specific check immediately.

    Inserts a 'running' result row, invokes the matching runner, persists the
    outcome, reschedules next_run_at from the check's frequency, and returns
    the stored result row (falling back to the raw runner result if no result
    row could be created).
    """
    row = db.execute(
        text("""
            SELECT * FROM compliance_evidence_checks
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
        """),
        {"id": check_id, "tenant_id": tenant_id},
    ).fetchone()
    if not row:
        raise HTTPException(404, "Check nicht gefunden")
    check = _row_to_dict(row)
    check_type = check["check_type"]
    target_url = check.get("target_url") or ""
    target_config = check.get("target_config") or {}
    runner = CHECK_RUNNERS.get(check_type)
    if not runner:
        raise HTTPException(400, f"Kein Runner fuer check_type: {check_type}")
    # Insert a placeholder result in state 'running' before executing,
    # committed immediately so the state is visible during the run.
    result_row = db.execute(
        text("""
            INSERT INTO compliance_evidence_check_results
                (check_id, tenant_id, run_status)
            VALUES
                (CAST(:check_id AS uuid), :tenant_id, 'running')
            RETURNING *
        """),
        {"check_id": check_id, "tenant_id": tenant_id},
    ).fetchone()
    db.commit()
    result_id = str(result_row._mapping["id"]) if result_row else None
    # Execute the runner (runners never raise; they return error results).
    run_result = await runner(target_url, target_config)
    # Fix: updated_row was previously only bound inside the `if result_id:`
    # branch, raising NameError at the final return whenever the placeholder
    # INSERT yielded no row. Initialize it unconditionally.
    updated_row = None
    if result_id:
        updated_row = db.execute(
            text("""
                UPDATE compliance_evidence_check_results
                SET run_status = :run_status,
                    result_data = CAST(:result_data AS jsonb),
                    summary = :summary,
                    findings_count = :findings_count,
                    critical_findings = :critical_findings,
                    duration_ms = :duration_ms
                WHERE id = CAST(:id AS uuid)
                RETURNING *
            """),
            {
                "id": result_id,
                "run_status": run_result["run_status"],
                "result_data": json.dumps(run_result["result_data"]),
                "summary": run_result["summary"],
                "findings_count": run_result["findings_count"],
                "critical_findings": run_result["critical_findings"],
                "duration_ms": run_result["duration_ms"],
            },
        ).fetchone()
        db.commit()
    # Reschedule: manual frequency (delta None) sets next_run_at to NULL.
    delta = FREQUENCY_DELTAS.get(check.get("frequency", "monthly"))
    next_run = (datetime.utcnow() + delta).isoformat() if delta else None
    db.execute(
        text("""
            UPDATE compliance_evidence_checks
            SET last_run_at = NOW(),
                next_run_at = CAST(:next_run AS timestamptz),
                updated_at = NOW()
            WHERE id = CAST(:id AS uuid)
        """),
        {"id": check_id, "next_run": next_run},
    )
    db.commit()
    return _row_to_dict(updated_row) if updated_row else run_result
@router.get("/{check_id}/results")
async def list_results(
    check_id: str,
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    db: Session = Depends(get_db),
    tenant_id: str = Depends(_get_tenant_id),
):
    """Paginated result history for one check; 404 if the check is unknown."""
    # Guard: the check must exist and belong to this tenant.
    guard = db.execute(
        text("""
            SELECT 1 FROM compliance_evidence_checks
            WHERE id = CAST(:id AS uuid) AND tenant_id = :tenant_id
        """),
        {"id": check_id, "tenant_id": tenant_id},
    ).fetchone()
    if guard is None:
        raise HTTPException(404, "Check nicht gefunden")
    count_row = db.execute(
        text("""
            SELECT COUNT(*) FROM compliance_evidence_check_results
            WHERE check_id = CAST(:check_id AS uuid)
        """),
        {"check_id": check_id},
    ).fetchone()
    total = count_row[0] if count_row else 0
    history = db.execute(
        text("""
            SELECT * FROM compliance_evidence_check_results
            WHERE check_id = CAST(:check_id AS uuid)
            ORDER BY run_at DESC
            LIMIT :limit OFFSET :offset
        """),
        {"check_id": check_id, "limit": limit, "offset": offset},
    ).fetchall()
    return {"results": [_row_to_dict(record) for record in history], "total": total}