- Create compliance/schemas/dsfa.py (161 LOC) — extract DSFACreate, DSFAUpdate, DSFAStatusUpdate, DSFASectionUpdate, DSFAApproveRequest - Create compliance/services/dsfa_service.py (386 LOC) — CRUD + helpers + stats + audit-log + CSV export; uses domain errors - Create compliance/services/dsfa_workflow_service.py (347 LOC) — status update, section update, submit-for-review, approve, export JSON, versions - Rewrite compliance/api/dsfa_routes.py (339 LOC) as thin handlers with Depends + translate_domain_errors(); re-export legacy symbols via __all__ - Add [mypy-compliance.api.dsfa_routes] ignore_errors = False to mypy.ini - Update tests: 422 -> 400 for domain ValidationError (6 assertions) - Regenerate OpenAPI baseline (360 paths / 484 operations — unchanged) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
387 lines
16 KiB
Python
# mypy: disable-error-code="arg-type,assignment,union-attr,no-any-return,call-overload,index,no-untyped-call"
"""
DSFA service — CRUD + helpers + stats + audit + CSV export.

Phase 1 Step 4: extracted from ``compliance.api.dsfa_routes``. The workflow
side (status update, section update, submit, approve, export, versions) lives
in ``compliance.services.dsfa_workflow_service``.

Module-level helpers (_dsfa_to_response, _get_tenant_id, _log_audit) are
shared by both service modules and re-exported from
``compliance.api.dsfa_routes`` for legacy test imports.
"""
|
|
|
|
import csv
|
|
import io
|
|
import json
|
|
import logging
|
|
from typing import Any, Optional
|
|
|
|
from sqlalchemy import text
|
|
from sqlalchemy.orm import Session
|
|
|
|
from compliance.domain import NotFoundError, ValidationError
|
|
from compliance.schemas.dsfa import DSFACreate, DSFAUpdate
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
|
|
VALID_STATUSES = {"draft", "in-review", "approved", "needs-update"}
|
|
VALID_RISK_LEVELS = {"low", "medium", "high", "critical"}
|
|
JSONB_FIELDS = {
|
|
"data_categories", "recipients", "measures", "data_subjects",
|
|
"affected_rights", "triggered_rule_codes", "ai_trigger_ids",
|
|
"wp248_criteria_met", "art35_abs3_triggered", "tom_references",
|
|
"risks", "mitigations", "stakeholder_consultations", "review_triggers",
|
|
"review_comments", "ai_use_case_modules", "threshold_analysis",
|
|
"consultation_requirement", "review_schedule", "section_progress",
|
|
"metadata",
|
|
}
|
|
|
|
# ---- Module-level helpers (re-exported by compliance.api.dsfa_routes) -----
|
|
|
|
|
|
def _get_tenant_id(tenant_id: Optional[str]) -> str:
|
|
return tenant_id or DEFAULT_TENANT_ID
|
|
|
|
|
|
def _parse_arr(val: Any) -> Any:
|
|
"""Parse a JSONB array field -> list."""
|
|
if val is None:
|
|
return []
|
|
if isinstance(val, list):
|
|
return val
|
|
if isinstance(val, str):
|
|
try:
|
|
parsed = json.loads(val)
|
|
return parsed if isinstance(parsed, list) else []
|
|
except Exception:
|
|
return []
|
|
return val
|
|
|
|
|
|
def _parse_obj(val: Any) -> Any:
|
|
"""Parse a JSONB object field -> dict."""
|
|
if val is None:
|
|
return {}
|
|
if isinstance(val, dict):
|
|
return val
|
|
if isinstance(val, str):
|
|
try:
|
|
parsed = json.loads(val)
|
|
return parsed if isinstance(parsed, dict) else {}
|
|
except Exception:
|
|
return {}
|
|
return val
|
|
|
|
|
|
def _ts(val: Any) -> Any:
|
|
"""Timestamp -> ISO string or None."""
|
|
if not val:
|
|
return None
|
|
return val if isinstance(val, str) else val.isoformat()
|
|
|
|
|
|
def _get(row: Any, key: str, default: Any = None) -> Any:
|
|
"""Safe row access — returns default if key missing."""
|
|
try:
|
|
v = row[key]
|
|
return default if v is None and default is not None else v
|
|
except (KeyError, IndexError):
|
|
return default
|
|
|
|
|
|
def _dsfa_to_response(row: Any) -> dict[str, Any]:
    """Convert a DB row to a JSON-serializable dict.

    Core columns are indexed directly (``row[...]``) so a schema mismatch
    fails loudly; newer/optional columns go through ``_get`` (via ``g``)
    so older rows without them still serialize with sensible defaults.
    """

    # Named helper instead of an assigned lambda (PEP 8 / flake8 E731,
    # previously silenced with ``# noqa: E731``). Same call contract.
    def g(k: str, d: Any = None) -> Any:
        return _get(row, k, d)

    prev = g("previous_version_id")
    return {
        "id": str(row["id"]),
        "tenant_id": row["tenant_id"],
        "title": row["title"],
        "description": row["description"] or "",
        "status": row["status"] or "draft",
        "risk_level": row["risk_level"] or "low",
        "processing_activity": row["processing_activity"] or "",
        "data_categories": _parse_arr(row["data_categories"]),
        "recipients": _parse_arr(row["recipients"]),
        "measures": _parse_arr(row["measures"]),
        "approved_by": row["approved_by"],
        "approved_at": _ts(row["approved_at"]),
        "created_by": row["created_by"] or "system",
        "created_at": _ts(row["created_at"]),
        "updated_at": _ts(row["updated_at"]),
        "processing_description": g("processing_description"),
        "processing_purpose": g("processing_purpose"),
        "legal_basis": g("legal_basis"),
        "legal_basis_details": g("legal_basis_details"),
        "necessity_assessment": g("necessity_assessment"),
        "proportionality_assessment": g("proportionality_assessment"),
        "data_minimization": g("data_minimization"),
        "alternatives_considered": g("alternatives_considered"),
        "retention_justification": g("retention_justification"),
        "involves_ai": g("involves_ai", False),
        "overall_risk_level": g("overall_risk_level"),
        "risk_score": g("risk_score", 0),
        "dpo_consulted": g("dpo_consulted", False),
        "dpo_consulted_at": _ts(g("dpo_consulted_at")),
        "dpo_name": g("dpo_name"),
        "dpo_opinion": g("dpo_opinion"),
        "dpo_approved": g("dpo_approved"),
        "authority_consulted": g("authority_consulted", False),
        "authority_consulted_at": _ts(g("authority_consulted_at")),
        "authority_reference": g("authority_reference"),
        "authority_decision": g("authority_decision"),
        "version": g("version", 1),
        "previous_version_id": str(prev) if prev else None,
        "conclusion": g("conclusion"),
        "federal_state": g("federal_state"),
        "authority_resource_id": g("authority_resource_id"),
        "submitted_for_review_at": _ts(g("submitted_for_review_at")),
        "submitted_by": g("submitted_by"),
        "data_subjects": _parse_arr(g("data_subjects")),
        "affected_rights": _parse_arr(g("affected_rights")),
        "triggered_rule_codes": _parse_arr(g("triggered_rule_codes")),
        "ai_trigger_ids": _parse_arr(g("ai_trigger_ids")),
        "wp248_criteria_met": _parse_arr(g("wp248_criteria_met")),
        "art35_abs3_triggered": _parse_arr(g("art35_abs3_triggered")),
        "tom_references": _parse_arr(g("tom_references")),
        "risks": _parse_arr(g("risks")),
        "mitigations": _parse_arr(g("mitigations")),
        "stakeholder_consultations": _parse_arr(g("stakeholder_consultations")),
        "review_triggers": _parse_arr(g("review_triggers")),
        "review_comments": _parse_arr(g("review_comments")),
        "ai_use_case_modules": _parse_arr(g("ai_use_case_modules")),
        "section_8_complete": g("section_8_complete", False),
        "threshold_analysis": _parse_obj(g("threshold_analysis")),
        "consultation_requirement": _parse_obj(g("consultation_requirement")),
        "review_schedule": _parse_obj(g("review_schedule")),
        "section_progress": _parse_obj(g("section_progress")),
        "metadata": _parse_obj(g("metadata")),
    }
|
|
|
|
|
|
def _log_audit(
    db: Session, tenant_id: str, dsfa_id: Any, action: str,
    changed_by: str = "system", old_values: Any = None,
    new_values: Any = None,
) -> None:
    """Append one row to the DSFA audit trail.

    Does NOT commit — the calling service method owns the transaction.
    """
    params: dict[str, Any] = {
        "tenant_id": tenant_id,
        # UUID or str accepted; falsy ids are stored as SQL NULL.
        "dsfa_id": str(dsfa_id) if dsfa_id else None,
        "action": action,
        "changed_by": changed_by,
        # jsonb columns: serialize here, CAST in the statement; falsy
        # payloads stay NULL rather than becoming 'null'::jsonb.
        "old_values": json.dumps(old_values) if old_values else None,
        "new_values": json.dumps(new_values) if new_values else None,
    }
    db.execute(
        text("""
            INSERT INTO compliance_dsfa_audit_log
            (tenant_id, dsfa_id, action, changed_by, old_values, new_values)
            VALUES
            (:tenant_id, :dsfa_id, :action, :changed_by,
             CAST(:old_values AS jsonb), CAST(:new_values AS jsonb))
        """),
        params,
    )
|
|
|
|
|
|
# ---- Service ---------------------------------------------------------------
|
|
|
|
|
|
class DSFAService:
    """CRUD + stats + audit-log + CSV export for DSFAs.

    Raw-SQL data-access layer over ``compliance_dsfas``. Every query is
    tenant-scoped (``_get_tenant_id`` supplies the default tenant) and all
    values travel as bound parameters. Failures surface as domain errors
    (``NotFoundError``, ``ValidationError``) rather than HTTP exceptions —
    the route layer translates them. Mutating methods write an audit-log
    row via ``_log_audit`` and commit the session themselves.
    """

    def __init__(self, db: Session) -> None:
        # Injected SQLAlchemy session; this service commits on mutations.
        self.db = db

    def list_dsfas(
        self, tenant_id: Optional[str], status: Optional[str],
        risk_level: Optional[str], skip: int, limit: int,
    ) -> list[dict[str, Any]]:
        """Return DSFAs for the tenant, newest first, with optional
        status / risk-level filters and LIMIT/OFFSET paging."""
        tid = _get_tenant_id(tenant_id)
        sql = "SELECT * FROM compliance_dsfas WHERE tenant_id = :tid"
        params: dict[str, Any] = {"tid": tid}
        # Filters are appended as bound parameters — never interpolated.
        if status:
            sql += " AND status = :status"; params["status"] = status
        if risk_level:
            sql += " AND risk_level = :risk_level"; params["risk_level"] = risk_level
        sql += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip"
        params["limit"] = limit; params["skip"] = skip
        rows = self.db.execute(text(sql), params).fetchall()
        return [_dsfa_to_response(r) for r in rows]

    def create(
        self, tenant_id: Optional[str], body: DSFACreate,
    ) -> dict[str, Any]:
        """Insert a new DSFA, audit the creation, commit, and return it.

        Raises:
            ValidationError: on an unknown status or risk level.
        """
        if body.status not in VALID_STATUSES:
            raise ValidationError(f"Ungültiger Status: {body.status}")
        if body.risk_level not in VALID_RISK_LEVELS:
            raise ValidationError(f"Ungültiges Risiko-Level: {body.risk_level}")
        tid = _get_tenant_id(tenant_id)
        row = self.db.execute(
            text("""
                INSERT INTO compliance_dsfas
                (tenant_id, title, description, status, risk_level,
                 processing_activity, data_categories, recipients,
                 measures, created_by)
                VALUES
                (:tenant_id, :title, :description, :status, :risk_level,
                 :processing_activity,
                 CAST(:data_categories AS jsonb),
                 CAST(:recipients AS jsonb),
                 CAST(:measures AS jsonb),
                 :created_by)
                RETURNING *
            """),
            {
                "tenant_id": tid, "title": body.title,
                "description": body.description, "status": body.status,
                "risk_level": body.risk_level,
                "processing_activity": body.processing_activity,
                # list fields serialized for the jsonb CASTs above
                "data_categories": json.dumps(body.data_categories),
                "recipients": json.dumps(body.recipients),
                "measures": json.dumps(body.measures),
                "created_by": body.created_by,
            },
        ).fetchone()
        # NOTE(review): flush before the audit insert — presumably to make
        # the new row visible inside the transaction; confirm necessity.
        self.db.flush()
        _log_audit(
            self.db, tid, row["id"], "CREATE", body.created_by,
            new_values={"title": body.title, "status": body.status},
        )
        self.db.commit()
        return _dsfa_to_response(row)

    def get(self, dsfa_id: str, tenant_id: Optional[str]) -> dict[str, Any]:
        """Fetch a single DSFA scoped to the tenant.

        Raises:
            NotFoundError: if no row matches id + tenant.
        """
        tid = _get_tenant_id(tenant_id)
        row = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not row:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        return _dsfa_to_response(row)

    def update(
        self, dsfa_id: str, tenant_id: Optional[str], body: DSFAUpdate,
    ) -> dict[str, Any]:
        """Partially update a DSFA, audit old/new values, and commit.

        ``exclude_none=True`` means fields cannot be cleared to NULL via
        this method — omitted/None fields are simply left untouched.

        Raises:
            NotFoundError: if the DSFA does not exist for this tenant.
            ValidationError: on an unknown status or risk level.
        """
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        updates = body.model_dump(exclude_none=True)
        if "status" in updates and updates["status"] not in VALID_STATUSES:
            raise ValidationError(f"Ungültiger Status: {updates['status']}")
        if "risk_level" in updates and updates["risk_level"] not in VALID_RISK_LEVELS:
            raise ValidationError(f"Ungültiges Risiko-Level: {updates['risk_level']}")
        if not updates:
            # Nothing to change — no write, no audit entry.
            return _dsfa_to_response(existing)
        set_clauses: list[str] = []
        params: dict[str, Any] = {"id": dsfa_id, "tid": tid}
        for field, value in updates.items():
            # `field` names come from the DSFAUpdate schema, not raw user
            # input, so interpolating the identifier is safe; the value
            # itself always stays a bound parameter.
            if field in JSONB_FIELDS:
                set_clauses.append(f"{field} = CAST(:{field} AS jsonb)")
                params[field] = json.dumps(value)
            else:
                set_clauses.append(f"{field} = :{field}")
                params[field] = value
        set_clauses.append("updated_at = NOW()")
        sql = (
            f"UPDATE compliance_dsfas SET {', '.join(set_clauses)} "
            f"WHERE id = :id AND tenant_id = :tid RETURNING *"
        )
        # Audit captures only title/status before the change, plus the
        # full patch that was applied.
        old_values = {"title": existing["title"], "status": existing["status"]}
        row = self.db.execute(text(sql), params).fetchone()
        _log_audit(self.db, tid, dsfa_id, "UPDATE",
                   new_values=updates, old_values=old_values)
        self.db.commit()
        return _dsfa_to_response(row)

    def delete(self, dsfa_id: str, tenant_id: Optional[str]) -> dict[str, Any]:
        """Delete a DSFA (audited), commit, and return a success payload.

        Raises:
            NotFoundError: if the DSFA does not exist for this tenant.
        """
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text("SELECT id, title FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        # Audit row is written before the DELETE so it is part of the
        # same transaction either way.
        _log_audit(self.db, tid, dsfa_id, "DELETE",
                   old_values={"title": existing["title"]})
        self.db.execute(
            text("DELETE FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        )
        self.db.commit()
        return {"success": True, "message": f"DSFA {dsfa_id} gelöscht"}

    def stats(self, tenant_id: Optional[str]) -> dict[str, Any]:
        """Aggregate per-status and per-risk-level counts for the tenant.

        Counting happens in Python over a two-column projection; NULLs
        default to "draft" / "low" to match ``_dsfa_to_response``.
        """
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("SELECT status, risk_level FROM compliance_dsfas WHERE tenant_id = :tid"),
            {"tid": tid},
        ).fetchall()
        by_status: dict[str, int] = {}
        by_risk: dict[str, int] = {}
        for row in rows:
            s = row["status"] or "draft"
            r = row["risk_level"] or "low"
            by_status[s] = by_status.get(s, 0) + 1
            by_risk[r] = by_risk.get(r, 0) + 1
        return {
            "total": len(rows), "by_status": by_status, "by_risk_level": by_risk,
            "draft_count": by_status.get("draft", 0),
            "in_review_count": by_status.get("in-review", 0),
            "approved_count": by_status.get("approved", 0),
            "needs_update_count": by_status.get("needs-update", 0),
        }

    def audit_log(
        self, tenant_id: Optional[str], limit: int, offset: int,
    ) -> list[dict[str, Any]]:
        """Return the tenant's audit-log entries, newest first, paged."""
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("""
                SELECT id, tenant_id, dsfa_id, action, changed_by,
                       old_values, new_values, created_at
                FROM compliance_dsfa_audit_log
                WHERE tenant_id = :tid
                ORDER BY created_at DESC LIMIT :limit OFFSET :offset
            """),
            {"tid": tid, "limit": limit, "offset": offset},
        ).fetchall()
        result: list[dict[str, Any]] = []
        for r in rows:
            ca = r["created_at"]
            result.append({
                "id": str(r["id"]),
                "tenant_id": r["tenant_id"],
                "dsfa_id": str(r["dsfa_id"]) if r["dsfa_id"] else None,
                "action": r["action"],
                "changed_by": r["changed_by"],
                # old/new_values are passed through as stored (jsonb).
                "old_values": r["old_values"],
                "new_values": r["new_values"],
                "created_at": ca if isinstance(ca, str) else (ca.isoformat() if ca else None),
            })
        return result

    def export_csv(self, tenant_id: Optional[str]) -> str:
        """Export all of the tenant's DSFAs as a semicolon-delimited CSV
        string (header + one summary row per DSFA, newest first)."""
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE tenant_id = :tid ORDER BY created_at DESC"),
            {"tid": tid},
        ).fetchall()
        output = io.StringIO()
        writer = csv.writer(output, delimiter=";")
        writer.writerow(["ID", "Titel", "Status", "Risiko-Level", "Erstellt", "Aktualisiert"])
        for r in rows:
            ca = r["created_at"]
            ua = r["updated_at"]
            writer.writerow([
                str(r["id"]), r["title"], r["status"] or "draft", r["risk_level"] or "low",
                ca if isinstance(ca, str) else (ca.isoformat() if ca else ""),
                ua if isinstance(ua, str) else (ua.isoformat() if ua else ""),
            ])
        return output.getvalue()
|