Files
breakpilot-compliance/backend-compliance/compliance/services/incident_service.py
Sharang Parnerkar cc1c61947d refactor(backend/api): extract Incident services (Step 4 — file 11 of 18)
compliance/api/incident_routes.py (916 LOC) -> 280 LOC thin routes +
two services + 95-line schemas file.

Two-service split for DSGVO Art. 33/34 Datenpannen-Management:

  incident_service.py (460 LOC):
    - CRUD (create, list, get, update, delete)
    - Stats, status update, timeline append, close
    - Module-level helpers: _calculate_risk_level, _is_notification_required,
      _calculate_72h_deadline, _incident_to_response, _measure_to_response,
      _parse_jsonb, _append_timeline, DEFAULT_TENANT_ID

  incident_workflow_service.py (329 LOC):
    - Risk assessment (likelihood x impact -> risk_level)
    - Art. 33 authority notification (with 72h deadline tracking)
    - Art. 34 data subject notification
    - Corrective measures CRUD

Both services use raw SQL via sqlalchemy.text() — no ORM models for
incident_incidents / incident_measures tables. Migrated from the Go
ai-compliance-sdk; Python backend is Source of Truth.

Legacy test compat: tests/test_incident_routes.py imports
_calculate_risk_level, _is_notification_required, _calculate_72h_deadline,
_incident_to_response, _measure_to_response, _parse_jsonb,
DEFAULT_TENANT_ID directly from compliance.api.incident_routes — all
re-exported via __all__.

Verified:
  - 223/223 pytest pass (173 core + 50 incident)
  - OpenAPI 360/484 unchanged
  - mypy compliance/ -> Success on 141 source files
  - incident_routes.py 916 -> 280 LOC
  - Hard-cap violations: 8 -> 7

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-09 08:35:57 +02:00

461 lines
16 KiB
Python

# mypy: disable-error-code="arg-type,assignment,union-attr,no-any-return"
"""
Incident service — CRUD + stats + status + timeline + close.
Phase 1 Step 4: extracted from ``compliance.api.incident_routes``. The
workflow side (risk assessment, Art. 33/34 notifications, measures) lives
in ``compliance.services.incident_workflow_service``.
Module-level helpers (_calculate_risk_level, _is_notification_required,
_calculate_72h_deadline, _incident_to_response, _measure_to_response,
_parse_jsonb) are shared by both service modules and re-exported from
``compliance.api.incident_routes`` for legacy test imports.
"""
import json
from datetime import datetime, timedelta, timezone
from typing import Any, Optional
from uuid import uuid4
from sqlalchemy import text
from sqlalchemy.orm import Session
from compliance.domain import NotFoundError, ValidationError
from compliance.schemas.incident import (
CloseIncidentRequest,
IncidentCreate,
IncidentUpdate,
StatusUpdate,
TimelineEntryRequest,
)
DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
# ============================================================================
# Module-level helpers (re-exported by compliance.api.incident_routes)
# ============================================================================
def _calculate_risk_level(likelihood: int, impact: int) -> str:
"""Calculate risk level from likelihood * impact score."""
score = likelihood * impact
if score >= 20:
return "critical"
if score >= 12:
return "high"
if score >= 6:
return "medium"
return "low"
def _is_notification_required(risk_level: str) -> bool:
"""DSGVO Art. 33 — notification required for critical/high risk."""
return risk_level in ("critical", "high")
def _calculate_72h_deadline(detected_at: datetime) -> str:
"""Calculate 72-hour DSGVO Art. 33 deadline."""
return (detected_at + timedelta(hours=72)).isoformat()
def _parse_jsonb(val: Any) -> Any:
"""Parse a JSONB field — already dict/list from psycopg or a JSON string."""
if val is None:
return None
if isinstance(val, (dict, list)):
return val
if isinstance(val, str):
try:
return json.loads(val)
except (json.JSONDecodeError, TypeError):
return val
return val
def _incident_to_response(row: Any) -> dict[str, Any]:
    """Convert a DB row (RowMapping) into an incident response dict.

    JSONB columns are decoded via ``_parse_jsonb``; datetime columns are
    serialized to ISO-8601 strings. Missing/None fields are left alone.
    """
    out = dict(row)
    jsonb_keys = (
        "risk_assessment", "authority_notification",
        "data_subject_notification", "timeline",
        "affected_data_categories", "affected_systems",
    )
    for key in jsonb_keys:
        if key in out:
            out[key] = _parse_jsonb(out[key])
    for key in ("detected_at", "created_at", "updated_at", "closed_at"):
        ts = out.get(key)
        if ts is not None and hasattr(ts, "isoformat"):
            out[key] = ts.isoformat()
    return out
def _measure_to_response(row: Any) -> dict[str, Any]:
"""Convert a DB measure row to response dict."""
r = dict(row)
for field in ("due_date", "completed_at", "created_at", "updated_at"):
if field in r and r[field] is not None and hasattr(r[field], "isoformat"):
r[field] = r[field].isoformat()
return r
def _append_timeline(db: Session, incident_id: str, entry: dict[str, Any]) -> None:
    """Append one entry to the incident's ``timeline`` JSONB array.

    COALESCE seeds an empty array for rows where timeline is still NULL,
    so the ``||`` concatenation never fails. Caller commits.
    """
    stmt = text(
        "UPDATE incident_incidents "
        "SET timeline = COALESCE(timeline, '[]'::jsonb) || :entry::jsonb, "
        "updated_at = NOW() WHERE id = :id"
    )
    params = {"id": incident_id, "entry": json.dumps(entry)}
    db.execute(stmt, params)
# ============================================================================
# Service
# ============================================================================
class IncidentService:
    """Incident CRUD + stats + status transitions + timeline + close.

    All persistence uses raw SQL via ``sqlalchemy.text()`` against the
    ``incident_incidents`` / ``incident_measures`` tables (no ORM models).
    Each public method commits its own transaction.
    """

    def __init__(self, db: Session) -> None:
        self.db = db

    def _require_exists(self, iid: str) -> None:
        """Raise :class:`NotFoundError` if no incident with this id exists."""
        row = self.db.execute(
            text("SELECT id FROM incident_incidents WHERE id = :id"),
            {"id": iid},
        ).first()
        if not row:
            raise NotFoundError("incident not found")

    def create(
        self, tenant_id: str, user_id: str, body: IncidentCreate
    ) -> dict[str, Any]:
        """Create an incident and seed its Art. 33 deadline and timeline.

        Returns the stored incident plus the 72h authority-notification
        deadline and the hours remaining until it.
        """
        incident_id = str(uuid4())
        now = datetime.now(timezone.utc)
        detected_at = now
        if body.detected_at:
            try:
                parsed = datetime.fromisoformat(body.detected_at.replace("Z", "+00:00"))
                # Naive timestamps are assumed to be UTC.
                detected_at = parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)
            except (ValueError, AttributeError):
                # Unparsable client timestamp: fall back to "now".
                detected_at = now
        # DSGVO Art. 33: authority must be notified within 72h of detection.
        deadline = detected_at + timedelta(hours=72)
        authority_notification = {"status": "pending", "deadline": deadline.isoformat()}
        data_subject_notification = {"required": False, "status": "not_required"}
        timeline = [{
            "timestamp": now.isoformat(),
            "action": "incident_created",
            "user_id": user_id,
            "details": "Incident detected and reported",
        }]
        self.db.execute(text("""
            INSERT INTO incident_incidents (
                id, tenant_id, title, description, category, status, severity,
                detected_at, reported_by,
                affected_data_categories, affected_data_subject_count, affected_systems,
                authority_notification, data_subject_notification, timeline,
                created_at, updated_at
            ) VALUES (
                :id, :tenant_id, :title, :description, :category, 'detected', :severity,
                :detected_at, :reported_by,
                CAST(:affected_data_categories AS jsonb),
                :affected_data_subject_count,
                CAST(:affected_systems AS jsonb),
                CAST(:authority_notification AS jsonb),
                CAST(:data_subject_notification AS jsonb),
                CAST(:timeline AS jsonb),
                :now, :now
            )
        """), {
            "id": incident_id,
            "tenant_id": tenant_id,
            "title": body.title,
            "description": body.description or "",
            "category": body.category,
            "severity": body.severity,
            "detected_at": detected_at.isoformat(),
            "reported_by": user_id,
            "affected_data_categories": json.dumps(body.affected_data_categories or []),
            "affected_data_subject_count": body.affected_data_subject_count or 0,
            "affected_systems": json.dumps(body.affected_systems or []),
            "authority_notification": json.dumps(authority_notification),
            "data_subject_notification": json.dumps(data_subject_notification),
            "timeline": json.dumps(timeline),
            "now": now.isoformat(),
        })
        self.db.commit()
        # Re-read so the response reflects DB-side defaults/normalization.
        row = self.db.execute(
            text("SELECT * FROM incident_incidents WHERE id = :id"),
            {"id": incident_id},
        ).mappings().first()
        incident_resp = _incident_to_response(row) if row else {}
        return {
            "incident": incident_resp,
            "authority_deadline": deadline.isoformat(),
            "hours_until_deadline": (deadline - now).total_seconds() / 3600,
        }

    def list_incidents(
        self,
        tenant_id: str,
        status: Optional[str],
        severity: Optional[str],
        category: Optional[str],
        limit: int,
        offset: int,
    ) -> dict[str, Any]:
        """List a tenant's incidents with optional filters and pagination.

        Returns ``{"incidents": [...], "total": <unpaginated count>}``.
        """
        # Filters are appended as fixed SQL fragments with bound params —
        # no user input is interpolated into the SQL text itself.
        where = ["tenant_id = :tenant_id"]
        params: dict[str, Any] = {
            "tenant_id": tenant_id, "limit": limit, "offset": offset,
        }
        if status:
            where.append("status = :status")
            params["status"] = status
        if severity:
            where.append("severity = :severity")
            params["severity"] = severity
        if category:
            where.append("category = :category")
            params["category"] = category
        where_sql = " AND ".join(where)
        total = (
            self.db.execute(
                text(f"SELECT COUNT(*) FROM incident_incidents WHERE {where_sql}"),
                params,
            ).scalar() or 0
        )
        rows = (
            self.db.execute(
                text(
                    f"SELECT * FROM incident_incidents WHERE {where_sql} "
                    f"ORDER BY created_at DESC LIMIT :limit OFFSET :offset"
                ),
                params,
            )
            .mappings()
            .all()
        )
        return {
            "incidents": [_incident_to_response(r) for r in rows],
            "total": total,
        }

    def stats(self, tenant_id: str) -> dict[str, Any]:
        """Aggregate incident counts for a tenant: total/open/closed + by severity."""
        row = (
            self.db.execute(
                text("""
                    SELECT
                        COUNT(*) AS total,
                        SUM(CASE WHEN status != 'closed' THEN 1 ELSE 0 END) AS open,
                        SUM(CASE WHEN status = 'closed' THEN 1 ELSE 0 END) AS closed,
                        SUM(CASE WHEN severity = 'critical' THEN 1 ELSE 0 END) AS critical,
                        SUM(CASE WHEN severity = 'high' THEN 1 ELSE 0 END) AS high,
                        SUM(CASE WHEN severity = 'medium' THEN 1 ELSE 0 END) AS medium,
                        SUM(CASE WHEN severity = 'low' THEN 1 ELSE 0 END) AS low
                    FROM incident_incidents
                    WHERE tenant_id = :tenant_id
                """),
                {"tenant_id": tenant_id},
            )
            .mappings()
            .first()
        )
        # SUM(...) yields NULL when no rows match, and a missing row must not
        # raise KeyError (the previous `or {}` fallback did exactly that on
        # row["total"]) — so read every field defensively via .get().
        data: dict[str, Any] = dict(row) if row else {}
        return {
            "total": int(data.get("total") or 0),
            "open": int(data.get("open") or 0),
            "closed": int(data.get("closed") or 0),
            "by_severity": {
                "critical": int(data.get("critical") or 0),
                "high": int(data.get("high") or 0),
                "medium": int(data.get("medium") or 0),
                "low": int(data.get("low") or 0),
            },
        }

    def get(self, incident_id: str) -> dict[str, Any]:
        """Fetch one incident with its corrective measures and deadline info.

        Raises :class:`NotFoundError` if the incident does not exist.
        ``deadline_info`` is ``None`` when no parsable Art. 33 deadline is stored.
        """
        row = (
            self.db.execute(
                text("SELECT * FROM incident_incidents WHERE id = :id"),
                {"id": incident_id},
            )
            .mappings()
            .first()
        )
        if not row:
            raise NotFoundError("incident not found")
        incident = _incident_to_response(row)
        measures = [
            _measure_to_response(m)
            for m in self.db.execute(
                text("SELECT * FROM incident_measures WHERE incident_id = :id ORDER BY created_at"),
                {"id": incident_id},
            )
            .mappings()
            .all()
        ]
        deadline_info = None
        auth_notif = (
            _parse_jsonb(row["authority_notification"])
            if "authority_notification" in row.keys()
            else None
        )
        if auth_notif and isinstance(auth_notif, dict) and "deadline" in auth_notif:
            try:
                deadline_dt = datetime.fromisoformat(
                    auth_notif["deadline"].replace("Z", "+00:00")
                )
                now = datetime.now(timezone.utc)
                hours_remaining = (deadline_dt - now).total_seconds() / 3600
                deadline_info = {
                    "deadline": auth_notif["deadline"],
                    "hours_remaining": hours_remaining,
                    "overdue": hours_remaining < 0,
                }
            except (ValueError, TypeError):
                # Malformed stored deadline — omit deadline_info rather than fail.
                pass
        return {"incident": incident, "measures": measures, "deadline_info": deadline_info}

    def update(self, incident_id: str, body: IncidentUpdate) -> dict[str, Any]:
        """Partially update an incident; only non-None fields are written.

        Raises :class:`NotFoundError` for an unknown id and
        :class:`ValidationError` when the body contains no updatable field.
        """
        self._require_exists(incident_id)
        updates: list[str] = []
        params: dict[str, Any] = {"id": incident_id}
        # Scalar columns share their field name as bind-param name; the field
        # list is a fixed whitelist, so the f-string below is injection-safe.
        for field in ("title", "description", "category", "status", "severity"):
            val = getattr(body, field, None)
            if val is not None:
                updates.append(f"{field} = :{field}")
                params[field] = val
        if body.affected_data_categories is not None:
            updates.append("affected_data_categories = CAST(:adc AS jsonb)")
            params["adc"] = json.dumps(body.affected_data_categories)
        if body.affected_data_subject_count is not None:
            updates.append("affected_data_subject_count = :adsc")
            params["adsc"] = body.affected_data_subject_count
        if body.affected_systems is not None:
            updates.append("affected_systems = CAST(:asys AS jsonb)")
            params["asys"] = json.dumps(body.affected_systems)
        if not updates:
            raise ValidationError("no fields to update")
        updates.append("updated_at = NOW()")
        self.db.execute(
            text(f"UPDATE incident_incidents SET {', '.join(updates)} WHERE id = :id"),
            params,
        )
        self.db.commit()
        row = (
            self.db.execute(
                text("SELECT * FROM incident_incidents WHERE id = :id"),
                {"id": incident_id},
            )
            .mappings()
            .first()
        )
        return {"incident": _incident_to_response(row)}

    def delete(self, incident_id: str) -> dict[str, Any]:
        """Delete an incident and its measures (measures first — FK order)."""
        self._require_exists(incident_id)
        self.db.execute(
            text("DELETE FROM incident_measures WHERE incident_id = :id"),
            {"id": incident_id},
        )
        self.db.execute(
            text("DELETE FROM incident_incidents WHERE id = :id"),
            {"id": incident_id},
        )
        self.db.commit()
        return {"message": "incident deleted"}

    def update_status(
        self, incident_id: str, user_id: str, body: StatusUpdate
    ) -> dict[str, Any]:
        """Set the incident status and record the change in the timeline."""
        self._require_exists(incident_id)
        self.db.execute(
            text(
                "UPDATE incident_incidents SET status = :status, updated_at = NOW() "
                "WHERE id = :id"
            ),
            {"id": incident_id, "status": body.status},
        )
        _append_timeline(self.db, incident_id, {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "action": "status_changed",
            "user_id": user_id,
            "details": f"Status changed to {body.status}",
        })
        self.db.commit()
        row = (
            self.db.execute(
                text("SELECT * FROM incident_incidents WHERE id = :id"),
                {"id": incident_id},
            )
            .mappings()
            .first()
        )
        return {"incident": _incident_to_response(row)}

    def add_timeline(
        self, incident_id: str, user_id: str, body: TimelineEntryRequest
    ) -> dict[str, Any]:
        """Append a caller-supplied entry to the incident timeline."""
        self._require_exists(incident_id)
        now = datetime.now(timezone.utc)
        entry = {
            "timestamp": now.isoformat(),
            "action": body.action,
            "user_id": user_id,
            "details": body.details or "",
        }
        _append_timeline(self.db, incident_id, entry)
        self.db.commit()
        return {"timeline_entry": entry}

    def close(
        self, incident_id: str, user_id: str, body: CloseIncidentRequest
    ) -> dict[str, Any]:
        """Close an incident, recording root cause / lessons learned + timeline."""
        self._require_exists(incident_id)
        now = datetime.now(timezone.utc)
        self.db.execute(
            text("""
                UPDATE incident_incidents
                SET status = 'closed',
                    root_cause = :root_cause,
                    lessons_learned = :lessons_learned,
                    closed_at = :now,
                    updated_at = :now
                WHERE id = :id
            """),
            {
                "id": incident_id,
                "root_cause": body.root_cause,
                "lessons_learned": body.lessons_learned or "",
                "now": now.isoformat(),
            },
        )
        _append_timeline(self.db, incident_id, {
            "timestamp": now.isoformat(),
            "action": "incident_closed",
            "user_id": user_id,
            "details": f"Incident closed. Root cause: {body.root_cause}",
        })
        self.db.commit()
        return {
            "message": "incident closed",
            "root_cause": body.root_cause,
            "lessons_learned": body.lessons_learned or "",
        }