refactor(backend/api): extract DSFA schemas + services (Step 4 — file 14 of 18)
- Create compliance/schemas/dsfa.py (161 LOC) — extract DSFACreate, DSFAUpdate, DSFAStatusUpdate, DSFASectionUpdate, DSFAApproveRequest
- Create compliance/services/dsfa_service.py (386 LOC) — CRUD + helpers + stats + audit-log + CSV export; uses domain errors
- Create compliance/services/dsfa_workflow_service.py (347 LOC) — status update, section update, submit-for-review, approve, export JSON, versions
- Rewrite compliance/api/dsfa_routes.py (339 LOC) as thin handlers with Depends + translate_domain_errors(); re-export legacy symbols via __all__
- Add [mypy-compliance.api.dsfa_routes] ignore_errors = False to mypy.ini
- Update tests: 422 -> 400 for domain ValidationError (6 assertions)
- Regenerate OpenAPI baseline (360 paths / 484 operations — unchanged)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
File diff suppressed because it is too large
Load Diff
161
backend-compliance/compliance/schemas/dsfa.py
Normal file
161
backend-compliance/compliance/schemas/dsfa.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""
|
||||
DSFA — Datenschutz-Folgenabschaetzung schemas (Art. 35 DSGVO).
|
||||
|
||||
Phase 1 Step 4: extracted from ``compliance.api.dsfa_routes``.
|
||||
"""
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class DSFACreate(BaseModel):
    """Request body for creating a DSFA (Datenschutz-Folgenabschaetzung, Art. 35 DSGVO).

    Only ``title`` is required; everything else defaults to an empty draft.
    ``status`` and ``risk_level`` are checked against VALID_STATUSES /
    VALID_RISK_LEVELS in the service layer, not here.  Field groups mirror
    the DSFA questionnaire sections.  Builtin generics (``list``/``dict``)
    are used for consistency with the service modules.
    """

    title: str
    description: str = ""
    status: str = "draft"
    risk_level: str = "low"
    processing_activity: str = ""
    data_categories: list[str] = []
    recipients: list[str] = []
    measures: list[str] = []
    created_by: str = "system"
    # Section 1 — processing description & legal basis
    processing_description: Optional[str] = None
    processing_purpose: Optional[str] = None
    legal_basis: Optional[str] = None
    legal_basis_details: Optional[str] = None
    # Section 2 — necessity & proportionality
    necessity_assessment: Optional[str] = None
    proportionality_assessment: Optional[str] = None
    data_minimization: Optional[str] = None
    alternatives_considered: Optional[str] = None
    retention_justification: Optional[str] = None
    # Section 3 — risk assessment
    involves_ai: Optional[bool] = None
    overall_risk_level: Optional[str] = None
    risk_score: Optional[int] = None
    # Section 6 — DPO / supervisory-authority consultation
    dpo_consulted: Optional[bool] = None
    dpo_name: Optional[str] = None
    dpo_opinion: Optional[str] = None
    dpo_approved: Optional[bool] = None
    authority_consulted: Optional[bool] = None
    authority_reference: Optional[str] = None
    authority_decision: Optional[str] = None
    # Metadata
    version: Optional[int] = None
    conclusion: Optional[str] = None
    federal_state: Optional[str] = None
    authority_resource_id: Optional[str] = None
    submitted_by: Optional[str] = None
    # JSONB array columns
    data_subjects: Optional[list[str]] = None
    affected_rights: Optional[list[str]] = None
    triggered_rule_codes: Optional[list[str]] = None
    ai_trigger_ids: Optional[list[str]] = None
    wp248_criteria_met: Optional[list[str]] = None
    art35_abs3_triggered: Optional[list[str]] = None
    tom_references: Optional[list[str]] = None
    risks: Optional[list[dict]] = None  # type: ignore[type-arg]
    mitigations: Optional[list[dict]] = None  # type: ignore[type-arg]
    stakeholder_consultations: Optional[list[dict]] = None  # type: ignore[type-arg]
    review_triggers: Optional[list[dict]] = None  # type: ignore[type-arg]
    review_comments: Optional[list[dict]] = None  # type: ignore[type-arg]
    ai_use_case_modules: Optional[list[dict]] = None  # type: ignore[type-arg]
    section_8_complete: Optional[bool] = None
    # JSONB object columns
    threshold_analysis: Optional[dict] = None  # type: ignore[type-arg]
    consultation_requirement: Optional[dict] = None  # type: ignore[type-arg]
    review_schedule: Optional[dict] = None  # type: ignore[type-arg]
    section_progress: Optional[dict] = None  # type: ignore[type-arg]
    metadata: Optional[dict] = None  # type: ignore[type-arg]
|
||||
|
||||
|
||||
class DSFAUpdate(BaseModel):
    """Partial-update body for PUT /dsfa/{id}.

    Every field is optional; the service applies ``model_dump(exclude_none=True)``,
    so only fields the client actually sends are written.  Builtin generics
    (``list``/``dict``) for consistency with the service modules.
    """

    title: Optional[str] = None
    description: Optional[str] = None
    status: Optional[str] = None
    risk_level: Optional[str] = None
    processing_activity: Optional[str] = None
    data_categories: Optional[list[str]] = None
    recipients: Optional[list[str]] = None
    measures: Optional[list[str]] = None
    approved_by: Optional[str] = None
    # Section 1 — processing description & legal basis
    processing_description: Optional[str] = None
    processing_purpose: Optional[str] = None
    legal_basis: Optional[str] = None
    legal_basis_details: Optional[str] = None
    # Section 2 — necessity & proportionality
    necessity_assessment: Optional[str] = None
    proportionality_assessment: Optional[str] = None
    data_minimization: Optional[str] = None
    alternatives_considered: Optional[str] = None
    retention_justification: Optional[str] = None
    # Section 3 — risk assessment
    involves_ai: Optional[bool] = None
    overall_risk_level: Optional[str] = None
    risk_score: Optional[int] = None
    # Section 6 — DPO / supervisory-authority consultation
    dpo_consulted: Optional[bool] = None
    dpo_name: Optional[str] = None
    dpo_opinion: Optional[str] = None
    dpo_approved: Optional[bool] = None
    authority_consulted: Optional[bool] = None
    authority_reference: Optional[str] = None
    authority_decision: Optional[str] = None
    # Metadata
    version: Optional[int] = None
    conclusion: Optional[str] = None
    federal_state: Optional[str] = None
    authority_resource_id: Optional[str] = None
    submitted_by: Optional[str] = None
    # JSONB array columns
    data_subjects: Optional[list[str]] = None
    affected_rights: Optional[list[str]] = None
    triggered_rule_codes: Optional[list[str]] = None
    ai_trigger_ids: Optional[list[str]] = None
    wp248_criteria_met: Optional[list[str]] = None
    art35_abs3_triggered: Optional[list[str]] = None
    tom_references: Optional[list[str]] = None
    risks: Optional[list[dict]] = None  # type: ignore[type-arg]
    mitigations: Optional[list[dict]] = None  # type: ignore[type-arg]
    stakeholder_consultations: Optional[list[dict]] = None  # type: ignore[type-arg]
    review_triggers: Optional[list[dict]] = None  # type: ignore[type-arg]
    review_comments: Optional[list[dict]] = None  # type: ignore[type-arg]
    ai_use_case_modules: Optional[list[dict]] = None  # type: ignore[type-arg]
    section_8_complete: Optional[bool] = None
    # JSONB object columns
    threshold_analysis: Optional[dict] = None  # type: ignore[type-arg]
    consultation_requirement: Optional[dict] = None  # type: ignore[type-arg]
    review_schedule: Optional[dict] = None  # type: ignore[type-arg]
    section_progress: Optional[dict] = None  # type: ignore[type-arg]
    metadata: Optional[dict] = None  # type: ignore[type-arg]
|
||||
|
||||
|
||||
class DSFAStatusUpdate(BaseModel):
    """Body for the status-change endpoint: target status plus optional approver name."""

    status: str
    approved_by: Optional[str] = None
|
||||
|
||||
|
||||
class DSFASectionUpdate(BaseModel):
    """Body for PUT /dsfa/{id}/sections/{section_number}."""

    # Plain-text payload for text-backed sections.
    content: Optional[str] = None
    # Structured payload for JSONB-backed sections; the frontend may send any
    # section-specific shape here.
    extra: Optional[dict] = None  # type: ignore[type-arg]
|
||||
|
||||
|
||||
class DSFAApproveRequest(BaseModel):
    """Body for POST /dsfa/{id}/approve.

    ``approved=True`` approves the DSFA; ``False`` rejects it.
    """

    approved: bool
    comments: Optional[str] = None
    approved_by: Optional[str] = None
|
||||
|
||||
|
||||
# Explicit public API of this schema module; these names are also re-exported
# from compliance.api.dsfa_routes for legacy imports.
__all__ = [
    "DSFACreate",
    "DSFAUpdate",
    "DSFAStatusUpdate",
    "DSFASectionUpdate",
    "DSFAApproveRequest",
]
|
||||
386
backend-compliance/compliance/services/dsfa_service.py
Normal file
386
backend-compliance/compliance/services/dsfa_service.py
Normal file
@@ -0,0 +1,386 @@
|
||||
# mypy: disable-error-code="arg-type,assignment,union-attr,no-any-return,call-overload,index,no-untyped-call"
|
||||
"""
|
||||
DSFA service — CRUD + helpers + stats + audit + CSV export.
|
||||
|
||||
Phase 1 Step 4: extracted from ``compliance.api.dsfa_routes``. The workflow
|
||||
side (status update, section update, submit, approve, export, versions) lives
|
||||
in ``compliance.services.dsfa_workflow_service``.
|
||||
|
||||
Module-level helpers (_dsfa_to_response, _get_tenant_id, _log_audit) are
|
||||
shared by both service modules and re-exported from
|
||||
``compliance.api.dsfa_routes`` for legacy test imports.
|
||||
"""
|
||||
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from compliance.domain import NotFoundError, ValidationError
|
||||
from compliance.schemas.dsfa import DSFACreate, DSFAUpdate
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Fallback tenant used when the caller supplies no tenant id (see _get_tenant_id).
DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
# Allowed DSFA lifecycle states.
VALID_STATUSES = {"draft", "in-review", "approved", "needs-update"}
# Allowed risk classifications.
VALID_RISK_LEVELS = {"low", "medium", "high", "critical"}
# Columns stored as JSONB: DSFAService.update() json.dumps()'s their values
# and writes them via CAST(... AS jsonb).
JSONB_FIELDS = {
    "data_categories", "recipients", "measures", "data_subjects",
    "affected_rights", "triggered_rule_codes", "ai_trigger_ids",
    "wp248_criteria_met", "art35_abs3_triggered", "tom_references",
    "risks", "mitigations", "stakeholder_consultations", "review_triggers",
    "review_comments", "ai_use_case_modules", "threshold_analysis",
    "consultation_requirement", "review_schedule", "section_progress",
    "metadata",
}
|
||||
|
||||
# ---- Module-level helpers (re-exported by compliance.api.dsfa_routes) -----
|
||||
|
||||
|
||||
def _get_tenant_id(tenant_id: Optional[str]) -> str:
|
||||
return tenant_id or DEFAULT_TENANT_ID
|
||||
|
||||
|
||||
def _parse_arr(val: Any) -> Any:
|
||||
"""Parse a JSONB array field -> list."""
|
||||
if val is None:
|
||||
return []
|
||||
if isinstance(val, list):
|
||||
return val
|
||||
if isinstance(val, str):
|
||||
try:
|
||||
parsed = json.loads(val)
|
||||
return parsed if isinstance(parsed, list) else []
|
||||
except Exception:
|
||||
return []
|
||||
return val
|
||||
|
||||
|
||||
def _parse_obj(val: Any) -> Any:
|
||||
"""Parse a JSONB object field -> dict."""
|
||||
if val is None:
|
||||
return {}
|
||||
if isinstance(val, dict):
|
||||
return val
|
||||
if isinstance(val, str):
|
||||
try:
|
||||
parsed = json.loads(val)
|
||||
return parsed if isinstance(parsed, dict) else {}
|
||||
except Exception:
|
||||
return {}
|
||||
return val
|
||||
|
||||
|
||||
def _ts(val: Any) -> Any:
|
||||
"""Timestamp -> ISO string or None."""
|
||||
if not val:
|
||||
return None
|
||||
return val if isinstance(val, str) else val.isoformat()
|
||||
|
||||
|
||||
def _get(row: Any, key: str, default: Any = None) -> Any:
|
||||
"""Safe row access — returns default if key missing."""
|
||||
try:
|
||||
v = row[key]
|
||||
return default if v is None and default is not None else v
|
||||
except (KeyError, IndexError):
|
||||
return default
|
||||
|
||||
|
||||
def _dsfa_to_response(row: Any) -> dict[str, Any]:
    """Convert a ``compliance_dsfas`` DB row to a JSON-serialisable dict.

    Core columns (id, tenant_id, title, ...) are indexed directly and will
    raise if absent; the newer/optional columns go through :func:`_get` so
    rows from older schema versions still serialise.  JSONB columns are
    normalised via :func:`_parse_arr` / :func:`_parse_obj` and timestamps
    via :func:`_ts`.
    """

    def g(key: str, default: Any = None) -> Any:
        # Named helper instead of an assigned lambda (PEP 8 E731).
        return _get(row, key, default)

    prev = g("previous_version_id")
    return {
        "id": str(row["id"]),
        "tenant_id": row["tenant_id"],
        "title": row["title"],
        "description": row["description"] or "",
        "status": row["status"] or "draft",
        "risk_level": row["risk_level"] or "low",
        "processing_activity": row["processing_activity"] or "",
        "data_categories": _parse_arr(row["data_categories"]),
        "recipients": _parse_arr(row["recipients"]),
        "measures": _parse_arr(row["measures"]),
        "approved_by": row["approved_by"],
        "approved_at": _ts(row["approved_at"]),
        "created_by": row["created_by"] or "system",
        "created_at": _ts(row["created_at"]),
        "updated_at": _ts(row["updated_at"]),
        "processing_description": g("processing_description"),
        "processing_purpose": g("processing_purpose"),
        "legal_basis": g("legal_basis"),
        "legal_basis_details": g("legal_basis_details"),
        "necessity_assessment": g("necessity_assessment"),
        "proportionality_assessment": g("proportionality_assessment"),
        "data_minimization": g("data_minimization"),
        "alternatives_considered": g("alternatives_considered"),
        "retention_justification": g("retention_justification"),
        "involves_ai": g("involves_ai", False),
        "overall_risk_level": g("overall_risk_level"),
        "risk_score": g("risk_score", 0),
        "dpo_consulted": g("dpo_consulted", False),
        "dpo_consulted_at": _ts(g("dpo_consulted_at")),
        "dpo_name": g("dpo_name"),
        "dpo_opinion": g("dpo_opinion"),
        "dpo_approved": g("dpo_approved"),
        "authority_consulted": g("authority_consulted", False),
        "authority_consulted_at": _ts(g("authority_consulted_at")),
        "authority_reference": g("authority_reference"),
        "authority_decision": g("authority_decision"),
        "version": g("version", 1),
        "previous_version_id": str(prev) if prev else None,
        "conclusion": g("conclusion"),
        "federal_state": g("federal_state"),
        "authority_resource_id": g("authority_resource_id"),
        "submitted_for_review_at": _ts(g("submitted_for_review_at")),
        "submitted_by": g("submitted_by"),
        "data_subjects": _parse_arr(g("data_subjects")),
        "affected_rights": _parse_arr(g("affected_rights")),
        "triggered_rule_codes": _parse_arr(g("triggered_rule_codes")),
        "ai_trigger_ids": _parse_arr(g("ai_trigger_ids")),
        "wp248_criteria_met": _parse_arr(g("wp248_criteria_met")),
        "art35_abs3_triggered": _parse_arr(g("art35_abs3_triggered")),
        "tom_references": _parse_arr(g("tom_references")),
        "risks": _parse_arr(g("risks")),
        "mitigations": _parse_arr(g("mitigations")),
        "stakeholder_consultations": _parse_arr(g("stakeholder_consultations")),
        "review_triggers": _parse_arr(g("review_triggers")),
        "review_comments": _parse_arr(g("review_comments")),
        "ai_use_case_modules": _parse_arr(g("ai_use_case_modules")),
        "section_8_complete": g("section_8_complete", False),
        "threshold_analysis": _parse_obj(g("threshold_analysis")),
        "consultation_requirement": _parse_obj(g("consultation_requirement")),
        "review_schedule": _parse_obj(g("review_schedule")),
        "section_progress": _parse_obj(g("section_progress")),
        "metadata": _parse_obj(g("metadata")),
    }
|
||||
|
||||
|
||||
def _log_audit(
    db: Session, tenant_id: str, dsfa_id: Any, action: str,
    changed_by: str = "system", old_values: Any = None,
    new_values: Any = None,
) -> None:
    """Insert one row into ``compliance_dsfa_audit_log``.

    Does NOT commit — callers commit the audit row together with the change
    being logged.  ``old_values``/``new_values`` are JSON-encoded for the
    jsonb columns; falsy payloads (including ``{}``) are stored as NULL.
    """
    db.execute(
        text("""
            INSERT INTO compliance_dsfa_audit_log
            (tenant_id, dsfa_id, action, changed_by, old_values, new_values)
            VALUES
            (:tenant_id, :dsfa_id, :action, :changed_by,
             CAST(:old_values AS jsonb), CAST(:new_values AS jsonb))
        """),
        {
            "tenant_id": tenant_id,
            # dsfa_id may be a UUID object or string; normalise to str for binding.
            "dsfa_id": str(dsfa_id) if dsfa_id else None,
            "action": action, "changed_by": changed_by,
            "old_values": json.dumps(old_values) if old_values else None,
            "new_values": json.dumps(new_values) if new_values else None,
        },
    )
|
||||
|
||||
|
||||
# ---- Service ---------------------------------------------------------------
|
||||
|
||||
|
||||
class DSFAService:
    """CRUD + stats + audit-log + CSV export for DSFA records.

    Every mutating method commits its own transaction and records an entry in
    ``compliance_dsfa_audit_log`` via the module-level :func:`_log_audit`.
    Missing records raise ``NotFoundError``; invalid status/risk values raise
    ``ValidationError`` (both translated to HTTP errors at the route layer).
    All queries are tenant-scoped via ``_get_tenant_id``.
    """

    def __init__(self, db: Session) -> None:
        self.db = db

    def list_dsfas(
        self, tenant_id: Optional[str], status: Optional[str],
        risk_level: Optional[str], skip: int, limit: int,
    ) -> list[dict[str, Any]]:
        """List DSFAs for the tenant, optionally filtered by status and/or
        risk level, newest first, paginated with LIMIT/OFFSET."""
        tid = _get_tenant_id(tenant_id)
        sql = "SELECT * FROM compliance_dsfas WHERE tenant_id = :tid"
        params: dict[str, Any] = {"tid": tid}
        # Filters are appended as bound parameters — user input never goes
        # into the SQL string itself.
        if status:
            sql += " AND status = :status"; params["status"] = status
        if risk_level:
            sql += " AND risk_level = :risk_level"; params["risk_level"] = risk_level
        sql += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip"
        params["limit"] = limit; params["skip"] = skip
        rows = self.db.execute(text(sql), params).fetchall()
        return [_dsfa_to_response(r) for r in rows]

    def create(
        self, tenant_id: Optional[str], body: DSFACreate,
    ) -> dict[str, Any]:
        """Insert a new DSFA (core fields only; section fields are filled in
        later via update/update_section).

        Raises:
            ValidationError: for a status/risk_level outside the allowed sets.
        """
        if body.status not in VALID_STATUSES:
            raise ValidationError(f"Ungültiger Status: {body.status}")
        if body.risk_level not in VALID_RISK_LEVELS:
            raise ValidationError(f"Ungültiges Risiko-Level: {body.risk_level}")
        tid = _get_tenant_id(tenant_id)
        row = self.db.execute(
            text("""
                INSERT INTO compliance_dsfas
                (tenant_id, title, description, status, risk_level,
                 processing_activity, data_categories, recipients,
                 measures, created_by)
                VALUES
                (:tenant_id, :title, :description, :status, :risk_level,
                 :processing_activity,
                 CAST(:data_categories AS jsonb),
                 CAST(:recipients AS jsonb),
                 CAST(:measures AS jsonb),
                 :created_by)
                RETURNING *
            """),
            {
                "tenant_id": tid, "title": body.title,
                "description": body.description, "status": body.status,
                "risk_level": body.risk_level,
                "processing_activity": body.processing_activity,
                "data_categories": json.dumps(body.data_categories),
                "recipients": json.dumps(body.recipients),
                "measures": json.dumps(body.measures),
                "created_by": body.created_by,
            },
        ).fetchone()
        # Flush so the new row's id is available before writing the audit row.
        self.db.flush()
        _log_audit(
            self.db, tid, row["id"], "CREATE", body.created_by,
            new_values={"title": body.title, "status": body.status},
        )
        self.db.commit()
        return _dsfa_to_response(row)

    def get(self, dsfa_id: str, tenant_id: Optional[str]) -> dict[str, Any]:
        """Fetch a single DSFA by id within the tenant, or raise NotFoundError."""
        tid = _get_tenant_id(tenant_id)
        row = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not row:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        return _dsfa_to_response(row)

    def update(
        self, dsfa_id: str, tenant_id: Optional[str], body: DSFAUpdate,
    ) -> dict[str, Any]:
        """Partially update a DSFA from the non-None fields of *body*.

        NOTE(review): exclude_none=True means a client cannot reset a column
        back to NULL through this endpoint — confirm that is intended.
        """
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        updates = body.model_dump(exclude_none=True)
        if "status" in updates and updates["status"] not in VALID_STATUSES:
            raise ValidationError(f"Ungültiger Status: {updates['status']}")
        if "risk_level" in updates and updates["risk_level"] not in VALID_RISK_LEVELS:
            raise ValidationError(f"Ungültiges Risiko-Level: {updates['risk_level']}")
        if not updates:
            # Nothing to change — return current state without touching the DB.
            return _dsfa_to_response(existing)
        set_clauses: list[str] = []
        params: dict[str, Any] = {"id": dsfa_id, "tid": tid}
        # Field names come from the DSFAUpdate model's attributes (model_dump),
        # not from arbitrary client keys, so f-string column names are safe.
        for field, value in updates.items():
            if field in JSONB_FIELDS:
                set_clauses.append(f"{field} = CAST(:{field} AS jsonb)")
                params[field] = json.dumps(value)
            else:
                set_clauses.append(f"{field} = :{field}")
                params[field] = value
        set_clauses.append("updated_at = NOW()")
        sql = (
            f"UPDATE compliance_dsfas SET {', '.join(set_clauses)} "
            f"WHERE id = :id AND tenant_id = :tid RETURNING *"
        )
        old_values = {"title": existing["title"], "status": existing["status"]}
        row = self.db.execute(text(sql), params).fetchone()
        # changed_by falls back to "system" here — the actual actor is not
        # recorded for updates; TODO confirm intended.
        _log_audit(self.db, tid, dsfa_id, "UPDATE",
                   new_values=updates, old_values=old_values)
        self.db.commit()
        return _dsfa_to_response(row)

    def delete(self, dsfa_id: str, tenant_id: Optional[str]) -> dict[str, Any]:
        """Hard-delete a DSFA; the audit row is written first so the title is
        captured before the row disappears."""
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text("SELECT id, title FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")
        _log_audit(self.db, tid, dsfa_id, "DELETE",
                   old_values={"title": existing["title"]})
        self.db.execute(
            text("DELETE FROM compliance_dsfas WHERE id = :id AND tenant_id = :tid"),
            {"id": dsfa_id, "tid": tid},
        )
        self.db.commit()
        return {"success": True, "message": f"DSFA {dsfa_id} gelöscht"}

    def stats(self, tenant_id: Optional[str]) -> dict[str, Any]:
        """Aggregate counts per status and per risk level for the tenant."""
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("SELECT status, risk_level FROM compliance_dsfas WHERE tenant_id = :tid"),
            {"tid": tid},
        ).fetchall()
        by_status: dict[str, int] = {}
        by_risk: dict[str, int] = {}
        for row in rows:
            # NULL columns count under the defaults (draft / low).
            s = row["status"] or "draft"
            r = row["risk_level"] or "low"
            by_status[s] = by_status.get(s, 0) + 1
            by_risk[r] = by_risk.get(r, 0) + 1
        return {
            "total": len(rows), "by_status": by_status, "by_risk_level": by_risk,
            "draft_count": by_status.get("draft", 0),
            "in_review_count": by_status.get("in-review", 0),
            "approved_count": by_status.get("approved", 0),
            "needs_update_count": by_status.get("needs-update", 0),
        }

    def audit_log(
        self, tenant_id: Optional[str], limit: int, offset: int,
    ) -> list[dict[str, Any]]:
        """Return the tenant's audit-log entries, newest first, paginated."""
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("""
                SELECT id, tenant_id, dsfa_id, action, changed_by,
                       old_values, new_values, created_at
                FROM compliance_dsfa_audit_log
                WHERE tenant_id = :tid
                ORDER BY created_at DESC LIMIT :limit OFFSET :offset
            """),
            {"tid": tid, "limit": limit, "offset": offset},
        ).fetchall()
        result: list[dict[str, Any]] = []
        for r in rows:
            ca = r["created_at"]
            result.append({
                "id": str(r["id"]),
                "tenant_id": r["tenant_id"],
                "dsfa_id": str(r["dsfa_id"]) if r["dsfa_id"] else None,
                "action": r["action"],
                "changed_by": r["changed_by"],
                "old_values": r["old_values"],
                "new_values": r["new_values"],
                # Timestamps may come back as str or datetime depending on driver.
                "created_at": ca if isinstance(ca, str) else (ca.isoformat() if ca else None),
            })
        return result

    def export_csv(self, tenant_id: Optional[str]) -> str:
        """Export all of the tenant's DSFAs as a semicolon-delimited CSV string
        (German-locale convention) with a fixed header row."""
        tid = _get_tenant_id(tenant_id)
        rows = self.db.execute(
            text("SELECT * FROM compliance_dsfas WHERE tenant_id = :tid ORDER BY created_at DESC"),
            {"tid": tid},
        ).fetchall()
        output = io.StringIO()
        writer = csv.writer(output, delimiter=";")
        writer.writerow(["ID", "Titel", "Status", "Risiko-Level", "Erstellt", "Aktualisiert"])
        for r in rows:
            ca = r["created_at"]
            ua = r["updated_at"]
            writer.writerow([
                str(r["id"]), r["title"], r["status"] or "draft", r["risk_level"] or "low",
                ca if isinstance(ca, str) else (ca.isoformat() if ca else ""),
                ua if isinstance(ua, str) else (ua.isoformat() if ua else ""),
            ])
        return output.getvalue()
|
||||
347
backend-compliance/compliance/services/dsfa_workflow_service.py
Normal file
347
backend-compliance/compliance/services/dsfa_workflow_service.py
Normal file
@@ -0,0 +1,347 @@
|
||||
# mypy: disable-error-code="arg-type,assignment,union-attr,no-any-return,call-overload,index"
|
||||
"""
|
||||
DSFA workflow service — status, section update, submit, approve, export, versions.
|
||||
|
||||
Phase 1 Step 4: extracted from ``compliance.api.dsfa_routes``. CRUD + helpers
|
||||
live in ``compliance.services.dsfa_service``.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from compliance.api.versioning_utils import get_version, list_versions
|
||||
from compliance.domain import NotFoundError, ValidationError
|
||||
from compliance.schemas.dsfa import (
|
||||
DSFAApproveRequest,
|
||||
DSFASectionUpdate,
|
||||
DSFAStatusUpdate,
|
||||
)
|
||||
from compliance.services.dsfa_service import (
|
||||
VALID_STATUSES,
|
||||
_dsfa_to_response,
|
||||
_get_tenant_id,
|
||||
_log_audit,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Maps DSFA questionnaire section number -> target column on compliance_dsfas.
# Sections 4, 5 and 8 are JSONB columns (see jsonb_sections in
# update_section); the others are plain text columns.
# NOTE(review): section 3 targets "risk_assessment", which
# _dsfa_to_response never reads — confirm that column exists and is surfaced.
SECTION_FIELD_MAP: dict[int, str] = {
    1: "processing_description",
    2: "necessity_assessment",
    3: "risk_assessment",
    4: "stakeholder_consultations",
    5: "measures",
    6: "dpo_opinion",
    7: "conclusion",
    8: "ai_use_case_modules",
}
|
||||
|
||||
|
||||
class DSFAWorkflowService:
    """DSFA workflow operations: status update, section update,
    submit-for-review, approve/reject, JSON export, and version lookups.

    CRUD lives in ``compliance.services.dsfa_service.DSFAService``; this
    service reuses its module-level helpers (``_get_tenant_id``,
    ``_dsfa_to_response``, ``_log_audit``).  Every mutating method commits
    its own transaction after writing the audit entry.
    """

    def __init__(self, db: Session) -> None:
        self.db = db

    # ------------------------------------------------------------------
    # Status update
    # ------------------------------------------------------------------

    def update_status(
        self,
        dsfa_id: str,
        tenant_id: Optional[str],
        body: DSFAStatusUpdate,
    ) -> dict[str, Any]:
        """Set the DSFA status directly.

        When the new status is ``approved``, ``approved_at`` is stamped with
        the current UTC time; for any other status both approval fields are
        overwritten (cleared unless the client sent ``approved_by``).

        Raises:
            ValidationError: if ``body.status`` is not in VALID_STATUSES.
            NotFoundError: if the DSFA does not exist for this tenant.
        """
        if body.status not in VALID_STATUSES:
            raise ValidationError(f"Ungültiger Status: {body.status}")

        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text(
                "SELECT id, status FROM compliance_dsfas "
                "WHERE id = :id AND tenant_id = :tid"
            ),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")

        params: dict[str, Any] = {
            "id": dsfa_id,
            "tid": tid,
            "status": body.status,
            "approved_at": (
                datetime.now(timezone.utc)
                if body.status == "approved"
                else None
            ),
            "approved_by": body.approved_by,
        }
        row = self.db.execute(
            text("""
                UPDATE compliance_dsfas
                SET status = :status, approved_at = :approved_at,
                    approved_by = :approved_by, updated_at = NOW()
                WHERE id = :id AND tenant_id = :tid
                RETURNING *
            """),
            params,
        ).fetchone()

        _log_audit(
            self.db, tid, dsfa_id, "STATUS_CHANGE",
            old_values={"status": existing["status"]},
            new_values={"status": body.status},
        )
        self.db.commit()
        return _dsfa_to_response(row)

    # ------------------------------------------------------------------
    # Section update
    # ------------------------------------------------------------------

    def update_section(
        self,
        dsfa_id: str,
        section_number: int,
        tenant_id: Optional[str],
        body: DSFASectionUpdate,
    ) -> dict[str, Any]:
        """Write one questionnaire section and mark it complete in
        ``section_progress``.

        Sections 4, 5 and 8 are JSONB-backed and take ``body.extra``; the
        other sections are text-backed and take ``body.content``.

        Raises:
            ValidationError: if ``section_number`` is outside 1-8.
            NotFoundError: if the DSFA does not exist for this tenant.
        """
        if section_number < 1 or section_number > 8:
            raise ValidationError(
                f"Section must be 1-8, got {section_number}"
            )

        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text(
                "SELECT * FROM compliance_dsfas "
                "WHERE id = :id AND tenant_id = :tid"
            ),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")

        field = SECTION_FIELD_MAP[section_number]
        jsonb_sections = {4, 5, 8}

        params: dict[str, Any] = {"id": dsfa_id, "tid": tid}

        if section_number in jsonb_sections:
            # JSONB sections take the structured payload; default to an empty
            # list when the client sent nothing.  (Fixes the original
            # `[] if section_number != 4 else []`, a no-op conditional whose
            # branches were identical.)
            value = body.extra if body.extra is not None else []
            params["val"] = json.dumps(value)
            set_clause = f"{field} = CAST(:val AS jsonb)"
        else:
            params["val"] = body.content or ""
            set_clause = f"{field} = :val"

        # Merge this section into section_progress (stored JSONB may come
        # back as a JSON string depending on the driver).
        progress = (
            existing["section_progress"]
            if existing["section_progress"]
            else {}
        )
        if isinstance(progress, str):
            progress = json.loads(progress)
        progress[f"section_{section_number}"] = True
        params["progress"] = json.dumps(progress)

        # `field` comes from SECTION_FIELD_MAP (server-controlled), so the
        # f-string column name is safe; the value itself is bound.
        row = self.db.execute(
            text(f"""
                UPDATE compliance_dsfas
                SET {set_clause},
                    section_progress = CAST(:progress AS jsonb),
                    updated_at = NOW()
                WHERE id = :id AND tenant_id = :tid
                RETURNING *
            """),
            params,
        ).fetchone()

        _log_audit(
            self.db, tid, dsfa_id, "SECTION_UPDATE",
            new_values={"section": section_number, "field": field},
        )
        self.db.commit()
        return _dsfa_to_response(row)

    # ------------------------------------------------------------------
    # Submit for review
    # ------------------------------------------------------------------

    def submit_for_review(
        self, dsfa_id: str, tenant_id: Optional[str]
    ) -> dict[str, Any]:
        """Move a DSFA from 'draft'/'needs-update' into 'in-review'.

        Raises:
            NotFoundError: if the DSFA does not exist for this tenant.
            ValidationError: if the current status does not allow submission.
        """
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text(
                "SELECT id, status FROM compliance_dsfas "
                "WHERE id = :id AND tenant_id = :tid"
            ),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")

        if existing["status"] not in ("draft", "needs-update"):
            raise ValidationError(
                f"Kann nur aus Status 'draft' oder 'needs-update' "
                f"eingereicht werden, aktuell: {existing['status']}"
            )

        row = self.db.execute(
            text("""
                UPDATE compliance_dsfas
                SET status = 'in-review',
                    submitted_for_review_at = NOW(),
                    updated_at = NOW()
                WHERE id = :id AND tenant_id = :tid
                RETURNING *
            """),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()

        _log_audit(
            self.db, tid, dsfa_id, "SUBMIT_FOR_REVIEW",
            old_values={"status": existing["status"]},
            new_values={"status": "in-review"},
        )
        self.db.commit()
        return {
            "message": "DSFA zur Prüfung eingereicht",
            "status": "in-review",
            "dsfa": _dsfa_to_response(row),
        }

    # ------------------------------------------------------------------
    # Approve / reject
    # ------------------------------------------------------------------

    def approve(
        self,
        dsfa_id: str,
        tenant_id: Optional[str],
        body: DSFAApproveRequest,
    ) -> dict[str, Any]:
        """Approve (``body.approved``) or reject a DSFA that is 'in-review'.

        Approval stamps approved_by/approved_at; rejection moves the DSFA to
        'needs-update'.  Reviewer comments are recorded in the audit log only.

        Raises:
            NotFoundError: if the DSFA does not exist for this tenant.
            ValidationError: if the DSFA is not currently 'in-review'.
        """
        tid = _get_tenant_id(tenant_id)
        existing = self.db.execute(
            text(
                "SELECT id, status FROM compliance_dsfas "
                "WHERE id = :id AND tenant_id = :tid"
            ),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not existing:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")

        if existing["status"] != "in-review":
            raise ValidationError(
                f"Nur DSFAs im Status 'in-review' können genehmigt werden, "
                f"aktuell: {existing['status']}"
            )

        if body.approved:
            new_status = "approved"
            self.db.execute(
                text("""
                    UPDATE compliance_dsfas
                    SET status = 'approved',
                        approved_by = :approved_by,
                        approved_at = NOW(),
                        updated_at = NOW()
                    WHERE id = :id AND tenant_id = :tid
                    RETURNING *
                """),
                {
                    "id": dsfa_id,
                    "tid": tid,
                    "approved_by": body.approved_by or "system",
                },
            ).fetchone()
        else:
            new_status = "needs-update"
            self.db.execute(
                text("""
                    UPDATE compliance_dsfas
                    SET status = 'needs-update', updated_at = NOW()
                    WHERE id = :id AND tenant_id = :tid
                    RETURNING *
                """),
                {"id": dsfa_id, "tid": tid},
            ).fetchone()

        _log_audit(
            self.db, tid, dsfa_id,
            "APPROVE" if body.approved else "REJECT",
            old_values={"status": existing["status"]},
            new_values={"status": new_status, "comments": body.comments},
        )
        self.db.commit()
        return {
            "message": (
                "DSFA genehmigt"
                if body.approved
                else "DSFA zurückgewiesen"
            ),
            "status": new_status,
        }

    # ------------------------------------------------------------------
    # Export JSON
    # ------------------------------------------------------------------

    def export_json(
        self, dsfa_id: str, tenant_id: Optional[str], fmt: str
    ) -> dict[str, Any]:
        """Return the full DSFA wrapped with an export timestamp.

        ``fmt`` is echoed back unchanged in the envelope.

        Raises:
            NotFoundError: if the DSFA does not exist for this tenant.
        """
        tid = _get_tenant_id(tenant_id)
        row = self.db.execute(
            text(
                "SELECT * FROM compliance_dsfas "
                "WHERE id = :id AND tenant_id = :tid"
            ),
            {"id": dsfa_id, "tid": tid},
        ).fetchone()
        if not row:
            raise NotFoundError(f"DSFA {dsfa_id} nicht gefunden")

        dsfa_data = _dsfa_to_response(row)
        return {
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "format": fmt,
            "dsfa": dsfa_data,
        }

    # ------------------------------------------------------------------
    # Versions
    # ------------------------------------------------------------------

    def list_versions(
        self, dsfa_id: str, tenant_id: Optional[str]
    ) -> Any:
        """List all stored versions of the DSFA (delegates to versioning_utils)."""
        tid = _get_tenant_id(tenant_id)
        return list_versions(self.db, "dsfa", dsfa_id, tid)

    def get_version(
        self,
        dsfa_id: str,
        version_number: int,
        tenant_id: Optional[str],
    ) -> Any:
        """Fetch one stored version of the DSFA, or raise NotFoundError."""
        tid = _get_tenant_id(tenant_id)
        v = get_version(self.db, "dsfa", dsfa_id, version_number, tid)
        if not v:
            raise NotFoundError(
                f"Version {version_number} not found"
            )
        return v
|
||||
Reference in New Issue
Block a user