feat(sdk): Multi-Tenancy, Versionierung, Change-Requests, Dokumentengenerierung (Phase 1-6)
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 32s
CI / test-python-backend-compliance (push) Successful in 30s
CI / test-python-document-crawler (push) Successful in 21s
CI / test-python-dsms-gateway (push) Successful in 18s
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 32s
CI / test-python-backend-compliance (push) Successful in 30s
CI / test-python-document-crawler (push) Successful in 21s
CI / test-python-dsms-gateway (push) Successful in 18s
6-Phasen-Implementation fuer cloud-faehiges, mandantenfaehiges Compliance SDK:
Phase 1: Multi-Tenancy Fix
- Shared tenant_utils.py Dependency (UUID-Validierung, kein "default" mehr)
- VVT tenant_id Column + tenant-scoped Queries
- DSFA/Vendor DEFAULT_TENANT_ID von "default" auf UUID migriert
- Migration 035
Phase 2: Stammdaten-Erweiterung
- Company Profile um JSONB-Felder erweitert (processing_systems, ai_systems, technical_contacts)
- Regulierungs-Flags (NIS2, AI Act, ISO 27001)
- GET /template-context Endpoint
- Migration 036
Phase 3: Dokument-Versionierung
- 5 Versions-Tabellen (DSFA, VVT, TOM, Loeschfristen, Obligations)
- Shared versioning_utils.py Helper
- /{id}/versions Endpoints auf allen 5 Dokumenttypen
- Migration 037
Phase 4: Change-Request System
- Zentrale CR-Inbox mit CRUD + Accept/Reject/Edit Workflow
- Regelbasierte CR-Engine (VVT DPIA → DSFA CR, Datenkategorien → Loeschfristen CR)
- Audit-Trail
- Migration 038
Phase 5: Dokumentengenerierung
- 5 Template-Generatoren (DSFA, VVT, TOM, Loeschfristen, Obligations)
- Preview + Apply Endpoints (erzeugt CRs, keine direkten Dokumente)
Phase 6: Frontend-Integration
- Change-Request Inbox Page mit Stats, Filtern, Modals
- VersionHistory Timeline-Komponente
- SDKSidebar CR-Badge (60s Polling)
- Company Profile: 2 neue Wizard-Steps + "Dokumente generieren" CTA
Docs: 5 neue MkDocs-Seiten, CLAUDE.md aktualisiert
Tests: 97 neue Tests (alle bestanden)
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -29,6 +29,8 @@ from .extraction_routes import router as extraction_router
|
||||
from .tom_routes import router as tom_router
|
||||
from .vendor_compliance_routes import router as vendor_compliance_router
|
||||
from .incident_routes import router as incident_router
|
||||
from .change_request_routes import router as change_request_router
|
||||
from .generation_routes import router as generation_router
|
||||
|
||||
# Include sub-routers
|
||||
router.include_router(audit_router)
|
||||
@@ -59,6 +61,8 @@ router.include_router(extraction_router)
|
||||
router.include_router(tom_router)
|
||||
router.include_router(vendor_compliance_router)
|
||||
router.include_router(incident_router)
|
||||
router.include_router(change_request_router)
|
||||
router.include_router(generation_router)
|
||||
|
||||
__all__ = [
|
||||
"router",
|
||||
@@ -89,4 +93,6 @@ __all__ = [
|
||||
"tom_router",
|
||||
"vendor_compliance_router",
|
||||
"incident_router",
|
||||
"change_request_router",
|
||||
"generation_router",
|
||||
]
|
||||
|
||||
181
backend-compliance/compliance/api/change_request_engine.py
Normal file
181
backend-compliance/compliance/api/change_request_engine.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""
|
||||
Change-Request Engine — Regelbasierte Generierung von Change-Requests.
|
||||
|
||||
Generates change requests when compliance-relevant events occur:
|
||||
- New high-risk use case → "DSFA erstellen"
|
||||
- AI involvement → "DSFA Section 3/8 aktualisieren"
|
||||
- New data categories → "VVT-Eintrag anlegen"
|
||||
- VVT dpia_required toggle → "DSFA erstellen"
|
||||
- New retention requirement → "Loeschfrist anlegen"
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate_change_requests_for_vvt(
    db: Session,
    tenant_id: str,
    activity_data: dict,
    created_by: str = "system",
) -> List[str]:
    """Generate CRs when a VVT activity is created or updated.

    Rules evaluated:
      1. ``dpia_required`` truthy → propose creating a DSFA (Art. 35 GDPR).
      2. Non-empty ``personal_data_categories`` → propose reviewing
         retention periods (Loeschfristen).

    Args:
        db: Open SQLAlchemy session; the caller commits.
        tenant_id: Tenant owning the activity and the resulting CRs.
        activity_data: Dict describing the VVT activity (keys used:
            name, dpia_required, personal_data_categories, vvt_id).
        created_by: Actor recorded on the created CRs.

    Returns:
        List of created CR IDs (empty if no rule fired or inserts failed).
    """
    cr_ids: List[str] = []
    # Fix: the proposal bodies previously used activity_data.get("name")
    # WITHOUT a fallback, so unnamed activities rendered the literal text
    # 'None' in user-facing German prose while the titles fell back to
    # 'Unbenannt'. Use one shared fallback for all display strings.
    activity_name = activity_data.get("name", "Unbenannt")

    # Rule 1: dpia_required=true → suggest DSFA
    if activity_data.get("dpia_required"):
        cr_id = _create_cr(
            db, tenant_id,
            trigger_type="vvt_dpia_required",
            target_document_type="dsfa",
            proposal_title=f"DSFA erstellen für VVT-Aktivität '{activity_name}'",
            proposal_body=f"Die VVT-Aktivität '{activity_name}' wurde als DSFA-pflichtig markiert. "
                          f"Eine Datenschutz-Folgenabschätzung nach Art. 35 DSGVO ist erforderlich.",
            proposed_changes={
                "source": "vvt_activity",
                # Keep the raw (possibly missing) name in the machine-readable
                # payload; only display strings use the 'Unbenannt' fallback.
                "activity_name": activity_data.get("name"),
                "activity_vvt_id": activity_data.get("vvt_id"),
            },
            priority="high",
            created_by=created_by,
        )
        if cr_id:
            cr_ids.append(cr_id)

    # Rule 2: New data categories → suggest Loeschfrist
    categories = activity_data.get("personal_data_categories", [])
    if categories:
        cr_id = _create_cr(
            db, tenant_id,
            trigger_type="vvt_data_categories",
            target_document_type="loeschfristen",
            proposal_title=f"Löschfrist für {len(categories)} Datenkategorie(n) prüfen",
            proposal_body=f"Die VVT-Aktivität '{activity_name}' verarbeitet folgende Datenkategorien: "
                          f"{', '.join(categories)}. Prüfen Sie, ob Löschfristen definiert sind.",
            proposed_changes={
                "source": "vvt_activity",
                "categories": categories,
            },
            priority="normal",
            created_by=created_by,
        )
        if cr_id:
            cr_ids.append(cr_id)

    return cr_ids
|
||||
|
||||
|
||||
def generate_change_requests_for_use_case(
    db: Session,
    tenant_id: str,
    use_case_data: dict,
    created_by: str = "system",
) -> List[str]:
    """Generate CRs when a high-risk or AI use case is created.

    Rules evaluated (in order):
      * risk_level in {high, critical} → propose creating a DSFA.
      * involves_ai → propose updating DSFA sections 3 and 8.

    Returns list of created CR IDs.
    """
    risk_level = use_case_data.get("risk_level", "low")
    involves_ai = use_case_data.get("involves_ai", False)
    title = use_case_data.get("title", "Unbenannt")

    # Collect the CR payloads first, then insert them in one pass below.
    proposals: List[dict] = []

    # Rule: High-risk use case → DSFA
    if risk_level in ("high", "critical"):
        proposals.append(dict(
            trigger_type="use_case_high_risk",
            target_document_type="dsfa",
            proposal_title=f"DSFA erstellen für '{title}' (Risiko: {risk_level})",
            proposal_body=f"Ein neuer Use Case mit hohem Risiko wurde erstellt. "
                          f"Art. 35 DSGVO verlangt eine DSFA für Hochrisiko-Verarbeitungen.",
            proposed_changes={
                "source": "use_case",
                "title": title,
                "risk_level": risk_level,
            },
            priority="critical" if risk_level == "critical" else "high",
        ))

    # Rule: AI involvement → DSFA section update
    if involves_ai:
        proposals.append(dict(
            trigger_type="use_case_ai",
            target_document_type="dsfa",
            target_section="section_3",
            proposal_title=f"DSFA Sektion 3/8 aktualisieren (KI in '{title}')",
            proposal_body=f"Der Use Case '{title}' nutzt KI-Systeme. "
                          f"Die DSFA-Risikoanalyse (Sektion 3) und Maßnahmen (Sektion 8) müssen aktualisiert werden.",
            proposed_changes={
                "source": "use_case",
                "title": title,
                "involves_ai": True,
            },
            priority="high",
        ))

    cr_ids: List[str] = []
    for payload in proposals:
        created_id = _create_cr(db, tenant_id, created_by=created_by, **payload)
        if created_id:
            cr_ids.append(created_id)
    return cr_ids
|
||||
|
||||
|
||||
def _create_cr(
    db: Session,
    tenant_id: str,
    trigger_type: str,
    target_document_type: str,
    proposal_title: str,
    proposal_body: str = "",
    proposed_changes: Optional[dict] = None,
    priority: str = "normal",
    target_document_id: Optional[str] = None,
    target_section: Optional[str] = None,
    trigger_source_id: Optional[str] = None,
    created_by: str = "system",
) -> Optional[str]:
    """Internal helper to insert a change request.

    Args:
        db: Open SQLAlchemy session; the caller is responsible for committing.
        tenant_id: Tenant the CR belongs to.
        trigger_type: Machine-readable rule id (e.g. "vvt_dpia_required").
        target_document_type: Document type the CR targets (dsfa, vvt, ...).
        proposal_title: Human-readable one-line summary.
        proposal_body: Longer explanation shown in the CR inbox.
        proposed_changes: JSON-serializable payload stored in the jsonb column.
        priority: One of low | normal | high | critical.
        target_document_id: Existing document the CR modifies, if any.
        target_section: Section within the target document, if any.
        trigger_source_id: Id of the entity that fired the rule, if any.
        created_by: Actor recorded as creator.

    Returns:
        The new CR id as a string, or None if the insert failed.

    Note:
        Fixes vs. previous version: parameters that default to None are now
        annotated Optional[...] (they were annotated as plain dict/str), and
        failures are logged via logger.exception so the traceback is kept.
    """
    try:
        result = db.execute(
            text("""
                INSERT INTO compliance_change_requests
                    (tenant_id, trigger_type, trigger_source_id, target_document_type,
                     target_document_id, target_section, proposal_title, proposal_body,
                     proposed_changes, priority, created_by)
                VALUES (:tid, :trigger, :source_id, :doc_type,
                        :doc_id, :section, :title, :body,
                        CAST(:changes AS jsonb), :priority, :by)
                RETURNING id
            """),
            {
                "tid": tenant_id,
                "trigger": trigger_type,
                "source_id": trigger_source_id,
                "doc_type": target_document_type,
                "doc_id": target_document_id,
                "section": target_section,
                "title": proposal_title,
                "body": proposal_body,
                # NULL-safe: an absent payload is stored as the empty object.
                "changes": json.dumps(proposed_changes or {}),
                "priority": priority,
                "by": created_by,
            },
        )
        row = result.fetchone()
        return str(row[0]) if row else None
    except Exception:
        # Best effort: CR generation must never break the triggering request.
        # logger.exception records the stack trace; callers treat None as
        # "no CR created".
        logger.exception("Failed to create change request")
        return None
|
||||
427
backend-compliance/compliance/api/change_request_routes.py
Normal file
427
backend-compliance/compliance/api/change_request_routes.py
Normal file
@@ -0,0 +1,427 @@
|
||||
"""
|
||||
FastAPI routes for Change-Request System.
|
||||
|
||||
Endpoints:
|
||||
GET /change-requests — List (filter: status, doc_type, priority)
|
||||
GET /change-requests/stats — Summary counts
|
||||
GET /change-requests/{id} — Detail + audit log
|
||||
POST /change-requests — Create manually
|
||||
POST /change-requests/{id}/accept — Accept → create new version
|
||||
POST /change-requests/{id}/reject — Reject with reason
|
||||
POST /change-requests/{id}/edit — Edit proposal, then accept
|
||||
DELETE /change-requests/{id} — Soft-delete
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Header
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from classroom_engine.database import get_db
|
||||
from .tenant_utils import get_tenant_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/change-requests", tags=["change-requests"])
|
||||
|
||||
# Allowed lifecycle states of a change request.
VALID_STATUSES = {"pending", "accepted", "rejected", "edited_and_accepted"}
# Allowed urgency levels (ordering is applied in the list endpoint's SQL).
VALID_PRIORITIES = {"low", "normal", "high", "critical"}
# Document types a CR may target; validated on manual creation.
VALID_DOC_TYPES = {"dsfa", "vvt", "tom", "loeschfristen", "obligation"}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Schemas
|
||||
# =============================================================================
|
||||
|
||||
class ChangeRequestCreate(BaseModel):
    """Request body for manually creating a change request."""

    # What caused this CR; "manual" when a user creates it via the API.
    trigger_type: str = "manual"
    # Optional id of the triggering entity (e.g. a VVT activity).
    trigger_source_id: Optional[str] = None
    # Must be one of VALID_DOC_TYPES (checked in the route handler).
    target_document_type: str
    # Existing document the CR modifies; None for "create new document".
    target_document_id: Optional[str] = None
    target_section: Optional[str] = None
    proposal_title: str
    proposal_body: Optional[str] = None
    # Structured change payload, persisted as JSONB.
    # (Pydantic copies field defaults per instance, so {} is safe here.)
    proposed_changes: dict = {}
    # Must be one of VALID_PRIORITIES (checked in the route handler).
    priority: str = "normal"
|
||||
|
||||
|
||||
class ChangeRequestEdit(BaseModel):
    """Partial edit of a pending CR; fields left as None are kept unchanged."""

    proposal_body: Optional[str] = None
    proposed_changes: Optional[dict] = None
|
||||
|
||||
|
||||
class ChangeRequestReject(BaseModel):
    """Rejection payload — a reason is mandatory."""

    rejection_reason: str
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Helpers
|
||||
# =============================================================================
|
||||
|
||||
def _cr_to_dict(row) -> dict:
|
||||
return {
|
||||
"id": str(row["id"]),
|
||||
"tenant_id": row["tenant_id"],
|
||||
"trigger_type": row["trigger_type"],
|
||||
"trigger_source_id": str(row["trigger_source_id"]) if row["trigger_source_id"] else None,
|
||||
"target_document_type": row["target_document_type"],
|
||||
"target_document_id": str(row["target_document_id"]) if row["target_document_id"] else None,
|
||||
"target_section": row["target_section"],
|
||||
"proposal_title": row["proposal_title"],
|
||||
"proposal_body": row["proposal_body"],
|
||||
"proposed_changes": row["proposed_changes"] or {},
|
||||
"status": row["status"],
|
||||
"priority": row["priority"],
|
||||
"decided_by": row["decided_by"],
|
||||
"decided_at": row["decided_at"].isoformat() if row["decided_at"] else None,
|
||||
"rejection_reason": row["rejection_reason"],
|
||||
"resulting_version_id": str(row["resulting_version_id"]) if row["resulting_version_id"] else None,
|
||||
"created_by": row["created_by"],
|
||||
"created_at": row["created_at"].isoformat() if row["created_at"] else None,
|
||||
"updated_at": row["updated_at"].isoformat() if row["updated_at"] else None,
|
||||
}
|
||||
|
||||
|
||||
def _log_cr_audit(db, cr_id, tenant_id, action, performed_by="system", before_state=None, after_state=None):
    """Append one entry to the change-request audit trail (caller commits)."""

    def _jsonb(state):
        # Absent/empty state dicts are stored as SQL NULL, not '{}'.
        return json.dumps(state) if state else None

    db.execute(
        text("""
            INSERT INTO compliance_change_request_audit
                (change_request_id, tenant_id, action, performed_by, before_state, after_state)
            VALUES (:cr_id, :tid, :action, :by, CAST(:before AS jsonb), CAST(:after AS jsonb))
        """),
        {
            "cr_id": cr_id,
            "tid": tenant_id,
            "action": action,
            "by": performed_by,
            "before": _jsonb(before_state),
            "after": _jsonb(after_state),
        },
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Routes
|
||||
# =============================================================================
|
||||
|
||||
@router.get("")
async def list_change_requests(
    status: Optional[str] = Query(None),
    target_document_type: Optional[str] = Query(None),
    priority: Optional[str] = Query(None),
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=500),
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """List change requests with optional filters."""
    # Base query is always tenant-scoped and hides soft-deleted rows.
    fragments = ["SELECT * FROM compliance_change_requests WHERE tenant_id = :tid AND NOT is_deleted"]
    params: dict = {"tid": tid}

    # Append one AND-clause per supplied filter; values go through bound
    # parameters only, never string interpolation.
    for fragment, key, value in (
        (" AND status = :status", "status", status),
        (" AND target_document_type = :doc_type", "doc_type", target_document_type),
        (" AND priority = :priority", "priority", priority),
    ):
        if value:
            fragments.append(fragment)
            params[key] = value

    # Order: critical → high → normal → everything else, newest first within
    # each priority bucket.
    fragments.append(
        " ORDER BY CASE priority WHEN 'critical' THEN 0 WHEN 'high' THEN 1 WHEN 'normal' THEN 2 ELSE 3 END, created_at DESC"
    )
    fragments.append(" LIMIT :limit OFFSET :skip")
    params["limit"] = limit
    params["skip"] = skip

    records = db.execute(text("".join(fragments)), params).fetchall()
    return [_cr_to_dict(record) for record in records]
|
||||
|
||||
|
||||
@router.get("/stats")
async def get_stats(
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """Summary counts for change requests."""
    # One aggregate row: pending / critical-pending / accepted / rejected.
    summary = db.execute(
        text("""
            SELECT
                COUNT(*) FILTER (WHERE status = 'pending') AS total_pending,
                COUNT(*) FILTER (WHERE status = 'pending' AND priority = 'critical') AS critical_count,
                COUNT(*) FILTER (WHERE status = 'accepted' OR status = 'edited_and_accepted') AS total_accepted,
                COUNT(*) FILTER (WHERE status = 'rejected') AS total_rejected
            FROM compliance_change_requests
            WHERE tenant_id = :tid AND NOT is_deleted
        """),
        {"tid": tid},
    ).fetchone()

    # Pending CRs broken down by the document type they target.
    per_type = db.execute(
        text("""
            SELECT target_document_type, COUNT(*)
            FROM compliance_change_requests
            WHERE tenant_id = :tid AND status = 'pending' AND NOT is_deleted
            GROUP BY target_document_type
        """),
        {"tid": tid},
    ).fetchall()

    return {
        "total_pending": summary[0] or 0,
        "critical_count": summary[1] or 0,
        "total_accepted": summary[2] or 0,
        "total_rejected": summary[3] or 0,
        "by_document_type": {doc_type: count for doc_type, count in per_type},
    }
|
||||
|
||||
|
||||
@router.get("/{cr_id}")
async def get_change_request(
    cr_id: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """Get change request detail with audit log.

    Args:
        cr_id: Change-request id (looked up tenant-scoped).
        tid: Tenant id resolved by the shared tenant dependency.
        db: SQLAlchemy session.

    Raises:
        HTTPException: 404 when no non-deleted CR with this id exists for
            the tenant.
    """
    # Tenant + soft-delete filter: never expose another tenant's CRs.
    row = db.execute(
        text("SELECT * FROM compliance_change_requests WHERE id = :id AND tenant_id = :tid AND NOT is_deleted"),
        {"id": cr_id, "tid": tid},
    ).fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Change request not found")

    cr = _cr_to_dict(row)

    # Attach audit log, newest entries first.
    audit_rows = db.execute(
        text("""
            SELECT id, action, performed_by, before_state, after_state, created_at
            FROM compliance_change_request_audit
            WHERE change_request_id = :cr_id
            ORDER BY created_at DESC
        """),
        {"cr_id": cr_id},
    ).fetchall()

    cr["audit_log"] = [
        {
            "id": str(a[0]),
            "action": a[1],
            "performed_by": a[2],
            "before_state": a[3],
            "after_state": a[4],
            "created_at": a[5].isoformat() if a[5] else None,
        }
        for a in audit_rows
    ]
    return cr
|
||||
|
||||
|
||||
@router.post("", status_code=201)
async def create_change_request(
    body: ChangeRequestCreate,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Create a change request manually."""
    # Up-front validation against the shared whitelists.
    if body.target_document_type not in VALID_DOC_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid target_document_type: {body.target_document_type}")
    if body.priority not in VALID_PRIORITIES:
        raise HTTPException(status_code=400, detail=f"Invalid priority: {body.priority}")

    actor = x_user_id or "system"

    inserted = db.execute(
        text("""
            INSERT INTO compliance_change_requests
                (tenant_id, trigger_type, trigger_source_id, target_document_type,
                 target_document_id, target_section, proposal_title, proposal_body,
                 proposed_changes, priority, created_by)
            VALUES (:tid, :trigger_type, :trigger_source_id, :doc_type,
                    :doc_id, :section, :title, :body,
                    CAST(:changes AS jsonb), :priority, :created_by)
            RETURNING *
        """),
        {
            "tid": tid,
            "trigger_type": body.trigger_type,
            "trigger_source_id": body.trigger_source_id,
            "doc_type": body.target_document_type,
            "doc_id": body.target_document_id,
            "section": body.target_section,
            "title": body.proposal_title,
            "body": body.proposal_body,
            "changes": json.dumps(body.proposed_changes),
            "priority": body.priority,
            "created_by": actor,
        },
    ).fetchone()

    # Audit entry and insert share one transaction — committed together.
    _log_cr_audit(db, inserted["id"], tid, "CREATED", actor)
    db.commit()

    return _cr_to_dict(inserted)
|
||||
|
||||
|
||||
@router.post("/{cr_id}/accept")
async def accept_change_request(
    cr_id: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Accept a change request → creates a new document version.

    Flow:
      1. Load the CR tenant-scoped (404 if missing or soft-deleted).
      2. Only 'pending' CRs may be accepted (422 otherwise).
      3. If the CR targets a concrete document, snapshot the proposed
         changes as a new version — best effort: a failure is logged and
         the CR is still accepted with resulting_version_id = NULL.
      4. Update status, write the audit entry, commit once at the end.
    """
    row = db.execute(
        text("SELECT * FROM compliance_change_requests WHERE id = :id AND tenant_id = :tid AND NOT is_deleted"),
        {"id": cr_id, "tid": tid},
    ).fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Change request not found")
    if row["status"] != "pending":
        raise HTTPException(status_code=422, detail=f"Cannot accept CR in status '{row['status']}'")

    user = x_user_id or "system"
    before_state = {"status": row["status"]}

    # If there's a target document, create a version snapshot
    resulting_version_id = None
    if row["target_document_id"]:
        try:
            # Function-local import — presumably to avoid an import cycle
            # with the versioning helpers; confirm before hoisting to module
            # level.
            from .versioning_utils import create_version_snapshot
            version = create_version_snapshot(
                db,
                doc_type=row["target_document_type"],
                doc_id=str(row["target_document_id"]),
                tenant_id=tid,
                snapshot=row["proposed_changes"] or {},
                change_summary=f"Accepted CR: {row['proposal_title']}",
                created_by=user,
            )
            resulting_version_id = version["id"]
        except Exception as e:
            # Deliberately non-fatal: acceptance must not fail because
            # versioning did.
            logger.warning(f"Could not create version for CR {cr_id}: {e}")

    # Update CR status
    updated = db.execute(
        text("""
            UPDATE compliance_change_requests
            SET status = 'accepted', decided_by = :user, decided_at = NOW(),
                resulting_version_id = :ver_id, updated_at = NOW()
            WHERE id = :id AND tenant_id = :tid
            RETURNING *
        """),
        {"id": cr_id, "tid": tid, "user": user, "ver_id": resulting_version_id},
    ).fetchone()

    _log_cr_audit(db, cr_id, tid, "ACCEPTED", user, before_state, {"status": "accepted"})
    db.commit()

    return _cr_to_dict(updated)
|
||||
|
||||
|
||||
@router.post("/{cr_id}/reject")
async def reject_change_request(
    cr_id: str,
    body: ChangeRequestReject,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Reject a change request with reason."""
    # Guard clauses: CR must exist for this tenant and still be pending.
    existing = db.execute(
        text("SELECT * FROM compliance_change_requests WHERE id = :id AND tenant_id = :tid AND NOT is_deleted"),
        {"id": cr_id, "tid": tid},
    ).fetchone()
    if not existing:
        raise HTTPException(status_code=404, detail="Change request not found")
    if existing["status"] != "pending":
        raise HTTPException(status_code=422, detail=f"Cannot reject CR in status '{existing['status']}'")

    actor = x_user_id or "system"

    # Record the decision and the reason in one statement.
    refreshed = db.execute(
        text("""
            UPDATE compliance_change_requests
            SET status = 'rejected', decided_by = :user, decided_at = NOW(),
                rejection_reason = :reason, updated_at = NOW()
            WHERE id = :id AND tenant_id = :tid
            RETURNING *
        """),
        {"id": cr_id, "tid": tid, "user": actor, "reason": body.rejection_reason},
    ).fetchone()

    _log_cr_audit(db, cr_id, tid, "REJECTED", actor, {"status": "pending"}, {"status": "rejected", "reason": body.rejection_reason})
    db.commit()

    return _cr_to_dict(refreshed)
|
||||
|
||||
|
||||
@router.post("/{cr_id}/edit")
async def edit_change_request(
    cr_id: str,
    body: ChangeRequestEdit,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Edit the proposal, then auto-accept.

    Applies the supplied proposal_body / proposed_changes (either may be
    omitted) and, in the same UPDATE, moves the CR to the terminal
    'edited_and_accepted' status. Only 'pending' CRs can be edited.

    Raises:
        HTTPException: 404 when not found for this tenant; 422 when the CR
            is not in 'pending' status.
    """
    row = db.execute(
        text("SELECT * FROM compliance_change_requests WHERE id = :id AND tenant_id = :tid AND NOT is_deleted"),
        {"id": cr_id, "tid": tid},
    ).fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Change request not found")
    if row["status"] != "pending":
        raise HTTPException(status_code=422, detail=f"Cannot edit CR in status '{row['status']}'")

    user = x_user_id or "system"
    updates = []
    params = {"id": cr_id, "tid": tid, "user": user}

    # Only overwrite fields the caller actually sent (None means "keep").
    if body.proposal_body is not None:
        updates.append("proposal_body = :body")
        params["body"] = body.proposal_body
    if body.proposed_changes is not None:
        updates.append("proposed_changes = CAST(:changes AS jsonb)")
        params["changes"] = json.dumps(body.proposed_changes)

    # NOTE(review): the status flips to 'edited_and_accepted' even when the
    # request body is empty — presumably intentional ("edit" implies accept);
    # confirm against the frontend workflow. Unlike /accept, no version
    # snapshot is created here.
    updates.append("status = 'edited_and_accepted'")
    updates.append("decided_by = :user")
    updates.append("decided_at = NOW()")
    updates.append("updated_at = NOW()")

    # Dynamic SQL is safe here: only the fixed fragments above are joined;
    # all values travel through bound parameters.
    sql = f"UPDATE compliance_change_requests SET {', '.join(updates)} WHERE id = :id AND tenant_id = :tid RETURNING *"
    updated = db.execute(text(sql), params).fetchone()

    _log_cr_audit(db, cr_id, tid, "EDITED_AND_ACCEPTED", user, {"status": "pending"}, {"status": "edited_and_accepted"})
    db.commit()

    return _cr_to_dict(updated)
|
||||
|
||||
|
||||
@router.delete("/{cr_id}")
async def delete_change_request(
    cr_id: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Soft-delete a change request."""
    # Flip the is_deleted flag; the row (and its audit trail) is retained.
    outcome = db.execute(
        text("""
            UPDATE compliance_change_requests
            SET is_deleted = TRUE, updated_at = NOW()
            WHERE id = :id AND tenant_id = :tid AND NOT is_deleted
        """),
        {"id": cr_id, "tid": tid},
    )
    # rowcount == 0 covers both "unknown id" and "already deleted".
    if outcome.rowcount == 0:
        raise HTTPException(status_code=404, detail="Change request not found")

    _log_cr_audit(db, cr_id, tid, "DELETED", x_user_id or "system")
    db.commit()

    return {"success": True, "message": "Change request deleted"}
|
||||
@@ -4,7 +4,9 @@ FastAPI routes for Company Profile CRUD with audit logging.
|
||||
Endpoints:
|
||||
- GET /v1/company-profile: Get company profile for a tenant
|
||||
- POST /v1/company-profile: Create or update company profile
|
||||
- DELETE /v1/company-profile: Delete company profile
|
||||
- GET /v1/company-profile/audit: Get audit log for a tenant
|
||||
- GET /v1/company-profile/template-context: Flat dict for template substitution
|
||||
"""
|
||||
|
||||
import json
|
||||
@@ -51,6 +53,17 @@ class CompanyProfileRequest(BaseModel):
|
||||
legal_contact_email: Optional[str] = None
|
||||
machine_builder: Optional[dict] = None
|
||||
is_complete: bool = False
|
||||
# Phase 2 fields
|
||||
repos: list[dict] = []
|
||||
document_sources: list[dict] = []
|
||||
processing_systems: list[dict] = []
|
||||
ai_systems: list[dict] = []
|
||||
technical_contacts: list[dict] = []
|
||||
subject_to_nis2: bool = False
|
||||
subject_to_ai_act: bool = False
|
||||
subject_to_iso27001: bool = False
|
||||
supervisory_authority: Optional[str] = None
|
||||
review_cycle_months: int = 12
|
||||
|
||||
|
||||
class CompanyProfileResponse(BaseModel):
|
||||
@@ -84,6 +97,17 @@ class CompanyProfileResponse(BaseModel):
|
||||
completed_at: Optional[str]
|
||||
created_at: str
|
||||
updated_at: str
|
||||
# Phase 2 fields
|
||||
repos: list[dict] = []
|
||||
document_sources: list[dict] = []
|
||||
processing_systems: list[dict] = []
|
||||
ai_systems: list[dict] = []
|
||||
technical_contacts: list[dict] = []
|
||||
subject_to_nis2: bool = False
|
||||
subject_to_ai_act: bool = False
|
||||
subject_to_iso27001: bool = False
|
||||
supervisory_authority: Optional[str] = None
|
||||
review_cycle_months: int = 12
|
||||
|
||||
|
||||
class AuditEntryResponse(BaseModel):
|
||||
@@ -99,6 +123,22 @@ class AuditListResponse(BaseModel):
|
||||
total: int
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SQL column lists — keep in sync with SELECT/INSERT
|
||||
# =============================================================================
|
||||
|
||||
_BASE_COLUMNS = """id, tenant_id, company_name, legal_form, industry, founded_year,
|
||||
business_model, offerings, company_size, employee_count, annual_revenue,
|
||||
headquarters_country, headquarters_city, has_international_locations,
|
||||
international_countries, target_markets, primary_jurisdiction,
|
||||
is_data_controller, is_data_processor, uses_ai, ai_use_cases,
|
||||
dpo_name, dpo_email, legal_contact_name, legal_contact_email,
|
||||
machine_builder, is_complete, completed_at, created_at, updated_at,
|
||||
repos, document_sources, processing_systems, ai_systems, technical_contacts,
|
||||
subject_to_nis2, subject_to_ai_act, subject_to_iso27001,
|
||||
supervisory_authority, review_cycle_months"""
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# HELPERS
|
||||
# =============================================================================
|
||||
@@ -136,6 +176,17 @@ def row_to_response(row) -> CompanyProfileResponse:
|
||||
completed_at=str(row[27]) if row[27] else None,
|
||||
created_at=str(row[28]),
|
||||
updated_at=str(row[29]),
|
||||
# Phase 2 fields (indices 30-39)
|
||||
repos=row[30] if isinstance(row[30], list) else [],
|
||||
document_sources=row[31] if isinstance(row[31], list) else [],
|
||||
processing_systems=row[32] if isinstance(row[32], list) else [],
|
||||
ai_systems=row[33] if isinstance(row[33], list) else [],
|
||||
technical_contacts=row[34] if isinstance(row[34], list) else [],
|
||||
subject_to_nis2=row[35] or False,
|
||||
subject_to_ai_act=row[36] or False,
|
||||
subject_to_iso27001=row[37] or False,
|
||||
supervisory_authority=row[38],
|
||||
review_cycle_months=row[39] or 12,
|
||||
)
|
||||
|
||||
|
||||
@@ -171,14 +222,7 @@ async def get_company_profile(
|
||||
db = SessionLocal()
|
||||
try:
|
||||
result = db.execute(
|
||||
"""SELECT id, tenant_id, company_name, legal_form, industry, founded_year,
|
||||
business_model, offerings, company_size, employee_count, annual_revenue,
|
||||
headquarters_country, headquarters_city, has_international_locations,
|
||||
international_countries, target_markets, primary_jurisdiction,
|
||||
is_data_controller, is_data_processor, uses_ai, ai_use_cases,
|
||||
dpo_name, dpo_email, legal_contact_name, legal_contact_email,
|
||||
machine_builder, is_complete, completed_at, created_at, updated_at
|
||||
FROM compliance_company_profiles WHERE tenant_id = :tenant_id""",
|
||||
f"SELECT {_BASE_COLUMNS} FROM compliance_company_profiles WHERE tenant_id = :tenant_id",
|
||||
{"tenant_id": tid},
|
||||
)
|
||||
row = result.fetchone()
|
||||
@@ -218,14 +262,21 @@ async def upsert_company_profile(
|
||||
international_countries, target_markets, primary_jurisdiction,
|
||||
is_data_controller, is_data_processor, uses_ai, ai_use_cases,
|
||||
dpo_name, dpo_email, legal_contact_name, legal_contact_email,
|
||||
machine_builder, is_complete)
|
||||
machine_builder, is_complete,
|
||||
repos, document_sources, processing_systems, ai_systems, technical_contacts,
|
||||
subject_to_nis2, subject_to_ai_act, subject_to_iso27001,
|
||||
supervisory_authority, review_cycle_months)
|
||||
VALUES (:tid, :company_name, :legal_form, :industry, :founded_year,
|
||||
:business_model, :offerings::jsonb, :company_size, :employee_count, :annual_revenue,
|
||||
:hq_country, :hq_city, :has_intl, :intl_countries::jsonb,
|
||||
:target_markets::jsonb, :jurisdiction,
|
||||
:is_controller, :is_processor, :uses_ai, :ai_use_cases::jsonb,
|
||||
:dpo_name, :dpo_email, :legal_name, :legal_email,
|
||||
:machine_builder::jsonb, :is_complete)
|
||||
:machine_builder::jsonb, :is_complete,
|
||||
:repos::jsonb, :document_sources::jsonb, :processing_systems::jsonb,
|
||||
:ai_systems::jsonb, :technical_contacts::jsonb,
|
||||
:subject_to_nis2, :subject_to_ai_act, :subject_to_iso27001,
|
||||
:supervisory_authority, :review_cycle_months)
|
||||
ON CONFLICT (tenant_id) DO UPDATE SET
|
||||
company_name = EXCLUDED.company_name,
|
||||
legal_form = EXCLUDED.legal_form,
|
||||
@@ -252,6 +303,16 @@ async def upsert_company_profile(
|
||||
legal_contact_email = EXCLUDED.legal_contact_email,
|
||||
machine_builder = EXCLUDED.machine_builder,
|
||||
is_complete = EXCLUDED.is_complete,
|
||||
repos = EXCLUDED.repos,
|
||||
document_sources = EXCLUDED.document_sources,
|
||||
processing_systems = EXCLUDED.processing_systems,
|
||||
ai_systems = EXCLUDED.ai_systems,
|
||||
technical_contacts = EXCLUDED.technical_contacts,
|
||||
subject_to_nis2 = EXCLUDED.subject_to_nis2,
|
||||
subject_to_ai_act = EXCLUDED.subject_to_ai_act,
|
||||
subject_to_iso27001 = EXCLUDED.subject_to_iso27001,
|
||||
supervisory_authority = EXCLUDED.supervisory_authority,
|
||||
review_cycle_months = EXCLUDED.review_cycle_months,
|
||||
updated_at = NOW()
|
||||
{completed_at_clause}""",
|
||||
{
|
||||
@@ -281,6 +342,16 @@ async def upsert_company_profile(
|
||||
"legal_email": profile.legal_contact_email,
|
||||
"machine_builder": json.dumps(profile.machine_builder) if profile.machine_builder else None,
|
||||
"is_complete": profile.is_complete,
|
||||
"repos": json.dumps(profile.repos),
|
||||
"document_sources": json.dumps(profile.document_sources),
|
||||
"processing_systems": json.dumps(profile.processing_systems),
|
||||
"ai_systems": json.dumps(profile.ai_systems),
|
||||
"technical_contacts": json.dumps(profile.technical_contacts),
|
||||
"subject_to_nis2": profile.subject_to_nis2,
|
||||
"subject_to_ai_act": profile.subject_to_ai_act,
|
||||
"subject_to_iso27001": profile.subject_to_iso27001,
|
||||
"supervisory_authority": profile.supervisory_authority,
|
||||
"review_cycle_months": profile.review_cycle_months,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -291,14 +362,7 @@ async def upsert_company_profile(
|
||||
|
||||
# Fetch and return
|
||||
result = db.execute(
|
||||
"""SELECT id, tenant_id, company_name, legal_form, industry, founded_year,
|
||||
business_model, offerings, company_size, employee_count, annual_revenue,
|
||||
headquarters_country, headquarters_city, has_international_locations,
|
||||
international_countries, target_markets, primary_jurisdiction,
|
||||
is_data_controller, is_data_processor, uses_ai, ai_use_cases,
|
||||
dpo_name, dpo_email, legal_contact_name, legal_contact_email,
|
||||
machine_builder, is_complete, completed_at, created_at, updated_at
|
||||
FROM compliance_company_profiles WHERE tenant_id = :tid""",
|
||||
f"SELECT {_BASE_COLUMNS} FROM compliance_company_profiles WHERE tenant_id = :tid",
|
||||
{"tid": tid},
|
||||
)
|
||||
row = result.fetchone()
|
||||
@@ -347,6 +411,68 @@ async def delete_company_profile(
|
||||
db.close()
|
||||
|
||||
|
||||
@router.get("/template-context")
async def get_template_context(
    tenant_id: str = "default",
    x_tenant_id: Optional[str] = Header(None, alias="X-Tenant-ID"),
):
    """Return flat dict for Jinja2 template substitution in document generation."""
    # The X-Tenant-ID header wins over the query parameter.
    tid = x_tenant_id or tenant_id
    db = SessionLocal()
    try:
        row = db.execute(
            f"SELECT {_BASE_COLUMNS} FROM compliance_company_profiles WHERE tenant_id = :tid",
            {"tid": tid},
        ).fetchone()
        if not row:
            raise HTTPException(status_code=404, detail="Company profile not found — fill Stammdaten first")

        resp = row_to_response(row)

        # Flat context dict for the template generators. Optional contact and
        # authority fields are normalized to "" so templates never render None.
        ctx = {
            "company_name": resp.company_name,
            "legal_form": resp.legal_form,
            "industry": resp.industry,
            "business_model": resp.business_model,
            "company_size": resp.company_size,
            "employee_count": resp.employee_count,
            "headquarters_country": resp.headquarters_country,
            "headquarters_city": resp.headquarters_city,
            "primary_jurisdiction": resp.primary_jurisdiction,
            "is_data_controller": resp.is_data_controller,
            "is_data_processor": resp.is_data_processor,
            "uses_ai": resp.uses_ai,
            "dpo_name": resp.dpo_name or "",
            "dpo_email": resp.dpo_email or "",
            "legal_contact_name": resp.legal_contact_name or "",
            "legal_contact_email": resp.legal_contact_email or "",
            "supervisory_authority": resp.supervisory_authority or "",
            "review_cycle_months": resp.review_cycle_months,
            "subject_to_nis2": resp.subject_to_nis2,
            "subject_to_ai_act": resp.subject_to_ai_act,
            "subject_to_iso27001": resp.subject_to_iso27001,
            # Lists as-is for iteration in templates
            "offerings": resp.offerings,
            "target_markets": resp.target_markets,
            "international_countries": resp.international_countries,
            "ai_use_cases": resp.ai_use_cases,
            "repos": resp.repos,
            "document_sources": resp.document_sources,
            "processing_systems": resp.processing_systems,
            "ai_systems": resp.ai_systems,
            "technical_contacts": resp.technical_contacts,
            # Derived helper values
            "has_ai_systems": len(resp.ai_systems) > 0,
            "processing_system_count": len(resp.processing_systems),
            "ai_system_count": len(resp.ai_systems),
            "is_complete": resp.is_complete,
        }
        return ctx
    finally:
        db.close()
|
||||
|
||||
|
||||
@router.get("/audit", response_model=AuditListResponse)
|
||||
async def get_audit_log(
|
||||
tenant_id: str = "default",
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
"""Document generation templates for compliance documents."""
|
||||
|
||||
from .dsfa_template import generate_dsfa_draft
|
||||
from .vvt_template import generate_vvt_drafts
|
||||
from .loeschfristen_template import generate_loeschfristen_drafts
|
||||
from .tom_template import generate_tom_drafts
|
||||
from .obligation_template import generate_obligation_drafts
|
||||
|
||||
__all__ = [
|
||||
"generate_dsfa_draft",
|
||||
"generate_vvt_drafts",
|
||||
"generate_loeschfristen_drafts",
|
||||
"generate_tom_drafts",
|
||||
"generate_obligation_drafts",
|
||||
]
|
||||
@@ -0,0 +1,82 @@
|
||||
"""DSFA template generator — creates DSFA skeleton from company profile."""
|
||||
|
||||
|
||||
def generate_dsfa_draft(ctx: dict) -> dict:
    """Generate a DSFA draft document from template context.

    Args:
        ctx: Flat dict from company-profile/template-context endpoint

    Returns:
        Dict with DSFA fields ready for creation
    """
    org = ctx.get("company_name", "Unbekannt")
    dpo_name = ctx.get("dpo_name", "")
    dpo_mail = ctx.get("dpo_email", "")

    # Wording of the DPO statement depends on whether a DPO has been named.
    if dpo_name:
        dsb_statement = f"Der Datenschutzbeauftragte ({dpo_name}) wurde konsultiert."
    else:
        dsb_statement = "Ein Datenschutzbeauftragter wurde noch nicht benannt."

    sections = {
        "section_1": {
            "title": "Beschreibung der Verarbeitung",
            "content": (
                f"Die {org} führt eine Datenschutz-Folgenabschätzung gemäß Art. 35 DSGVO durch.\n\n"
                f"**Verantwortlicher:** {org}\n"
                f"**Datenschutzbeauftragter:** {dpo_name} ({dpo_mail})\n"
                f"**Zuständige Aufsichtsbehörde:** {ctx.get('supervisory_authority', 'Nicht angegeben')}"
            ),
        },
        "section_2": {
            "title": "Notwendigkeit und Verhältnismäßigkeit",
            "content": (
                "Die Verarbeitung ist zur Erreichung des beschriebenen Zwecks erforderlich. "
                "Alternative, weniger eingriffsintensive Maßnahmen wurden geprüft."
            ),
        },
        "section_3": {
            "title": "Risiken für die Rechte und Freiheiten",
            "content": _generate_risk_section(ctx),
        },
        "section_6": {
            "title": "Stellungnahme des DSB",
            "content": dsb_statement,
        },
    }

    ai_systems = ctx.get("ai_systems", [])
    involves_ai = len(ai_systems) > 0

    return {
        "title": f"DSFA — {org}",
        "description": f"Automatisch generierte Datenschutz-Folgenabschätzung für {org}",
        "status": "draft",
        # AI involvement bumps the initial risk classification.
        "risk_level": "high" if involves_ai else "medium",
        "involves_ai": involves_ai,
        "dpo_name": dpo_name,
        "sections": sections,
        "processing_systems": [item.get("name", "") for item in ctx.get("processing_systems", [])],
        "ai_systems_summary": [
            {"name": item.get("name"), "risk": item.get("risk_category", "unknown")}
            for item in ai_systems
        ],
    }


def _generate_risk_section(ctx: dict) -> str:
    """Render the markdown risk-analysis section for the DSFA draft."""
    parts = ["## Risikoanalyse\n"]

    ai_present = ctx.get("has_ai_systems")
    if ai_present:
        parts.append("### KI-Systeme\n")
        for entry in ctx.get("ai_systems", []):
            oversight = "Ja" if entry.get("has_human_oversight") else "Nein"
            parts.append(
                f"- **{entry.get('name', 'N/A')}**: Zweck: {entry.get('purpose', 'N/A')}, "
                f"Risiko: {entry.get('risk_category', 'unbekannt')}, Human Oversight: {oversight}"
            )
        parts.append("")

    if ctx.get("subject_to_ai_act"):
        parts.append(
            "**Hinweis:** Das Unternehmen unterliegt dem EU AI Act. "
            "KI-spezifische Risiken müssen gemäß der KI-Verordnung bewertet werden.\n"
        )

    if ctx.get("subject_to_nis2"):
        parts.append(
            "**Hinweis:** NIS2-Richtlinie ist anwendbar. "
            "Cybersicherheitsrisiken sind zusätzlich zu bewerten.\n"
        )

    # Fallback note when neither AI nor NIS2 risks apply.
    if not ai_present and not ctx.get("subject_to_nis2"):
        parts.append("Standardrisiken der Datenverarbeitung sind zu bewerten.\n")

    return "\n".join(parts)
|
||||
@@ -0,0 +1,49 @@
|
||||
"""Loeschfristen template generator — creates retention policies per data category."""
|
||||
|
||||
# Standard DSGVO retention periods, keyed by data category.
# duration/legal_basis reflect common German statutory retention rules.
_STANDARD_PERIODS = {
    "Bankdaten": {"duration": "10 Jahre", "legal_basis": "§ 257 HGB, § 147 AO"},
    "Steuer-ID": {"duration": "10 Jahre", "legal_basis": "§ 147 AO"},
    "Bewerbungsunterlagen": {"duration": "6 Monate", "legal_basis": "§ 15 AGG"},
    "Lohnabrechnungen": {"duration": "6 Jahre", "legal_basis": "§ 257 HGB"},
    "Gesundheitsdaten": {"duration": "10 Jahre", "legal_basis": "§ 630f BGB"},
    "Kundendaten": {"duration": "3 Jahre", "legal_basis": "§ 195 BGB (Verjährung)"},
    "Vertragsdaten": {"duration": "10 Jahre", "legal_basis": "§ 257 HGB"},
    "Kommunikationsdaten": {"duration": "6 Monate", "legal_basis": "Art. 5 Abs. 1e DSGVO"},
    "Zugriffsprotokolle": {"duration": "12 Monate", "legal_basis": "Art. 5 Abs. 1e DSGVO"},
    "Mitarbeiterdaten": {"duration": "3 Jahre nach Austritt", "legal_basis": "§ 195 BGB"},
}


def generate_loeschfristen_drafts(ctx: dict) -> list[dict]:
    """Generate retention policy drafts based on processing systems.

    One policy is produced per distinct data category found across all
    processing systems, pre-filled from ``_STANDARD_PERIODS`` where the
    category is known and with review placeholders otherwise.

    Args:
        ctx: Flat dict from company-profile/template-context

    Returns:
        List of Loeschfristen dicts ready for creation, sorted by category
    """
    # Distinct data categories across all processing systems.
    all_categories = {
        cat
        for system in ctx.get("processing_systems", [])
        for cat in system.get("personal_data_categories", [])
    }

    policies = []
    for i, category in enumerate(sorted(all_categories), 1):
        standard = _STANDARD_PERIODS.get(category, {})
        policies.append({
            "policy_id": f"LF-AUTO-{i:03d}",
            "data_category": category,
            "retention_period": standard.get("duration", "Noch festzulegen"),
            "legal_basis": standard.get("legal_basis", "Zu prüfen"),
            "deletion_method": "Automatische Löschung nach Ablauf",
            # Fall back to the generic role name when no DPO is configured.
            "responsible": ctx.get("dpo_name", "DSB"),
            "status": "draft",
            "review_cycle_months": ctx.get("review_cycle_months", 12),
            # Plain string — the original used an f-string with no placeholders (F541).
            "notes": "Automatisch generiert aus Stammdaten. Bitte prüfen und anpassen.",
        })

    return policies
|
||||
@@ -0,0 +1,141 @@
|
||||
"""Obligation template generator — creates standard DSGVO obligations."""
|
||||
|
||||
# Baseline DSGVO obligations — always applicable.
_DSGVO_OBLIGATIONS = [
    {
        "title": "Verzeichnis der Verarbeitungstätigkeiten führen",
        "regulation": "DSGVO",
        "article": "Art. 30",
        "description": "Das Verzeichnis muss alle Verarbeitungstätigkeiten mit personenbezogenen Daten dokumentieren.",
        "category": "documentation",
        "priority": "high",
    },
    {
        "title": "Datenschutz-Folgenabschätzung durchführen",
        "regulation": "DSGVO",
        "article": "Art. 35",
        "description": "Für Verarbeitungen mit hohem Risiko muss eine DSFA vor Beginn der Verarbeitung durchgeführt werden.",
        "category": "risk_assessment",
        "priority": "high",
    },
    {
        "title": "Technisch-organisatorische Maßnahmen implementieren",
        "regulation": "DSGVO",
        "article": "Art. 32",
        "description": "Angemessene TOMs zum Schutz personenbezogener Daten unter Berücksichtigung des Stands der Technik.",
        "category": "technical",
        "priority": "high",
    },
    {
        "title": "Betroffenenrechte sicherstellen",
        "regulation": "DSGVO",
        "article": "Art. 12-22",
        "description": "Auskunft, Berichtigung, Löschung, Datenportabilität, Widerspruch — alle Rechte müssen binnen eines Monats erfüllt werden.",
        "category": "data_subject_rights",
        "priority": "high",
    },
    {
        "title": "Datenschutzverletzungen melden",
        "regulation": "DSGVO",
        "article": "Art. 33-34",
        "description": "Meldung an die Aufsichtsbehörde binnen 72 Stunden; Benachrichtigung Betroffener bei hohem Risiko.",
        "category": "incident_response",
        "priority": "critical",
    },
    {
        "title": "Auftragsverarbeitungsverträge abschließen",
        "regulation": "DSGVO",
        "article": "Art. 28",
        "description": "Schriftliche AV-Verträge mit allen Dienstleistern, die personenbezogene Daten verarbeiten.",
        "category": "vendor_management",
        "priority": "high",
    },
    {
        "title": "Datenschutzbeauftragten benennen",
        "regulation": "DSGVO",
        "article": "Art. 37",
        "description": "Pflicht bei ≥20 Mitarbeitern in der automatisierten Verarbeitung oder bei Verarbeitung besonderer Kategorien.",
        "category": "governance",
        "priority": "medium",
    },
    {
        "title": "Löschkonzept implementieren",
        "regulation": "DSGVO",
        "article": "Art. 17",
        "description": "Recht auf Löschung — systematisches Löschkonzept mit definierten Fristen pro Datenkategorie.",
        "category": "data_lifecycle",
        "priority": "high",
    },
]

# Additional obligations when the tenant is subject to the EU AI Act or uses AI.
_AI_ACT_OBLIGATIONS = [
    {
        "title": "KI-System-Register führen",
        "regulation": "EU AI Act",
        "article": "Art. 49",
        "description": "Alle KI-Systeme müssen in der EU-Datenbank registriert werden (Hochrisiko).",
        "category": "documentation",
        "priority": "high",
    },
    {
        "title": "KI-Risikomanagement einrichten",
        "regulation": "EU AI Act",
        "article": "Art. 9",
        "description": "Risikomanagementsystem für den gesamten Lebenszyklus von Hochrisiko-KI-Systemen.",
        "category": "risk_assessment",
        "priority": "critical",
    },
    {
        "title": "KI-Transparenzpflichten erfüllen",
        "regulation": "EU AI Act",
        "article": "Art. 52",
        "description": "Nutzer müssen über die Interaktion mit KI-Systemen informiert werden.",
        "category": "transparency",
        "priority": "high",
    },
]

# Additional obligations when the tenant is subject to NIS2.
_NIS2_OBLIGATIONS = [
    {
        "title": "Cybersicherheits-Risikomanagement",
        "regulation": "NIS2",
        "article": "Art. 21",
        "description": "Angemessene und verhältnismäßige technische, betriebliche und organisatorische Maßnahmen.",
        "category": "cybersecurity",
        "priority": "critical",
    },
    {
        "title": "Meldepflichten NIS2",
        "regulation": "NIS2",
        "article": "Art. 23",
        "description": "Frühwarnung binnen 24h, Vorfallmeldung binnen 72h, Abschlussbericht binnen 1 Monat.",
        "category": "incident_response",
        "priority": "critical",
    },
]


def generate_obligation_drafts(ctx: dict) -> list[dict]:
    """Generate obligation drafts based on regulatory flags.

    Args:
        ctx: Flat dict from company-profile/template-context

    Returns:
        List of obligation dicts ready for creation
    """
    # Select applicable templates; DSGVO always applies.
    templates = list(_DSGVO_OBLIGATIONS)

    if ctx.get("subject_to_ai_act") or ctx.get("has_ai_systems"):
        templates.extend(_AI_ACT_OBLIGATIONS)

    if ctx.get("subject_to_nis2"):
        templates.extend(_NIS2_OBLIGATIONS)

    # BUG FIX: the original enriched the template dicts in place. list() only
    # copies the list, not the dicts inside, so every call mutated the
    # module-level constants and clobbered the results of earlier calls.
    # Copy each dict before enriching it with company context.
    obligations = []
    for i, template in enumerate(templates, 1):
        obligation = dict(template)
        obligation["obligation_id"] = f"OBL-AUTO-{i:03d}"
        obligation["status"] = "open"
        obligation["responsible"] = ctx.get("dpo_name", "")
        obligation["review_cycle_months"] = ctx.get("review_cycle_months", 12)
        obligations.append(obligation)

    return obligations
|
||||
@@ -0,0 +1,69 @@
|
||||
"""TOM template generator — creates TOM checklist based on regulatory flags."""
|
||||
|
||||
# Baseline TOM checklist — always included.
_BASE_TOMS = [
    {"category": "Zutrittskontrolle", "name": "Physische Zugangskontrollen", "description": "Schlüssel, Kartenleser, Videoüberwachung"},
    {"category": "Zugangskontrolle", "name": "Authentifizierung", "description": "Passwortrichtlinien, MFA, SSO"},
    {"category": "Zugriffskontrolle", "name": "Berechtigungskonzept", "description": "RBAC, Least Privilege, regelmäßige Reviews"},
    {"category": "Weitergabekontrolle", "name": "Verschlüsselung im Transit", "description": "TLS 1.3 für alle Verbindungen"},
    {"category": "Eingabekontrolle", "name": "Audit-Logging", "description": "Protokollierung aller Datenänderungen"},
    {"category": "Auftragskontrolle", "name": "AV-Verträge", "description": "Art. 28 DSGVO Auftragsverarbeitungsverträge"},
    {"category": "Verfügbarkeitskontrolle", "name": "Backup & Recovery", "description": "Regelmäßige Backups, Disaster Recovery Plan"},
    {"category": "Trennungsgebot", "name": "Mandantentrennung", "description": "Logische Datentrennung nach Mandanten"},
]

# Extra measures when the tenant is subject to NIS2.
_NIS2_TOMS = [
    {"category": "Cybersicherheit", "name": "Incident Response Plan", "description": "NIS2-konformer Vorfallreaktionsplan (72h Meldepflicht)"},
    {"category": "Cybersicherheit", "name": "Supply Chain Security", "description": "Bewertung der Lieferkettensicherheit"},
    {"category": "Cybersicherheit", "name": "Vulnerability Management", "description": "Regelmäßige Schwachstellenscans und Patch-Management"},
]

# Extra measures when the tenant runs an ISO 27001 ISMS.
_ISO27001_TOMS = [
    {"category": "ISMS", "name": "Risikomanagement", "description": "ISO 27001 Anhang A — Informationssicherheits-Risikobewertung"},
    {"category": "ISMS", "name": "Dokumentenlenkung", "description": "Versionierte Sicherheitsrichtlinien und -verfahren"},
    {"category": "ISMS", "name": "Management Review", "description": "Jährliche Überprüfung des ISMS durch die Geschäftsleitung"},
]

# Extra measures when the tenant is subject to the EU AI Act or uses AI.
_AI_ACT_TOMS = [
    {"category": "KI-Compliance", "name": "KI-Risikoklassifizierung", "description": "Bewertung aller KI-Systeme nach EU AI Act Risikokategorien"},
    {"category": "KI-Compliance", "name": "Human Oversight", "description": "Menschliche Aufsicht für Hochrisiko-KI-Systeme sicherstellen"},
    {"category": "KI-Compliance", "name": "KI-Transparenz", "description": "Transparenzpflichten bei KI-Einsatz gegenüber Betroffenen"},
]


def generate_tom_drafts(ctx: dict) -> list[dict]:
    """Generate TOM measure drafts based on regulatory flags.

    Args:
        ctx: Flat dict from company-profile/template-context

    Returns:
        List of TOM measure dicts ready for creation
    """
    measures = list(_BASE_TOMS)

    if ctx.get("subject_to_nis2"):
        measures.extend(_NIS2_TOMS)

    if ctx.get("subject_to_iso27001"):
        measures.extend(_ISO27001_TOMS)

    if ctx.get("subject_to_ai_act") or ctx.get("has_ai_systems"):
        measures.extend(_AI_ACT_TOMS)

    # Enrich with metadata. (The original also read company_name here but
    # never used it — dropped as an unused local.)
    result = []
    for i, m in enumerate(measures, 1):
        result.append({
            "control_id": f"TOM-AUTO-{i:03d}",
            "name": m["name"],
            "description": m["description"],
            "category": m["category"],
            # Access/transfer control and cybersecurity count as technical;
            # everything else is organizational.
            "type": "technical" if m["category"] in ("Zugangskontrolle", "Zugriffskontrolle", "Weitergabekontrolle", "Cybersicherheit") else "organizational",
            "implementation_status": "planned",
            "responsible_department": "IT-Sicherheit",
            "priority": "high" if "KI" in m.get("category", "") or "Cyber" in m.get("category", "") else "medium",
            "review_frequency": f"{ctx.get('review_cycle_months', 12)} Monate",
        })

    return result
|
||||
@@ -0,0 +1,53 @@
|
||||
"""VVT template generator — creates VVT activity drafts per processing system."""
|
||||
|
||||
|
||||
def generate_vvt_drafts(ctx: dict) -> list[dict]:
    """Generate VVT activity drafts, one per processing system.

    Args:
        ctx: Flat dict from company-profile/template-context

    Returns:
        List of VVT activity dicts ready for creation
    """
    org = ctx.get("company_name", "Unbekannt")
    dpo = ctx.get("dpo_name", "")

    drafts = []
    for idx, system in enumerate(ctx.get("processing_systems", []), 1):
        sys_name = system.get("name", f"System {idx}")
        vendor = system.get("vendor", "")
        hosting = system.get("hosting", "on-premise")
        categories = system.get("personal_data_categories", [])

        description = f"Automatisch generierter VVT-Eintrag für das System '{sys_name}'"
        if vendor:
            description += f" (Anbieter: {vendor})"

        drafts.append({
            "vvt_id": f"VVT-AUTO-{idx:03d}",
            "name": f"Verarbeitung in {sys_name}",
            "description": description,
            "purposes": [f"Datenverarbeitung via {sys_name}"],
            "legal_bases": ["Art. 6 Abs. 1b DSGVO — Vertragserfüllung"],
            "data_subject_categories": [],
            "personal_data_categories": categories,
            "recipient_categories": [vendor] if vendor else [],
            "third_country_transfers": _assess_third_country(hosting),
            "retention_period": {"default": "Gemäß Löschfristenkatalog"},
            "tom_description": f"Siehe TOM-Katalog für {sys_name}",
            "business_function": "IT",
            "systems": [sys_name],
            "deployment_model": hosting,
            # Any personal data category at all bumps the protection level.
            "protection_level": "HIGH" if categories else "MEDIUM",
            # More than three categories triggers the DPIA-required flag.
            "dpia_required": len(categories) > 3,
            "status": "DRAFT",
            "responsible": dpo or org,
        })

    return drafts


def _assess_third_country(hosting: str) -> list:
    """Map a hosting model to the list of third-country transfers it implies."""
    third_country_models = ("us-cloud", "international")
    if hosting in third_country_models:
        return [{"country": "USA", "mechanism": "EU-US Data Privacy Framework"}]
    return []
|
||||
@@ -33,7 +33,11 @@ from classroom_engine.database import get_db
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/dsfa", tags=["compliance-dsfa"])
|
||||
|
||||
DEFAULT_TENANT_ID = "default"
|
||||
from .tenant_utils import get_tenant_id as _shared_get_tenant_id
|
||||
|
||||
# Legacy compat — still used by _get_tenant_id() below; will be removed once
|
||||
# all call-sites switch to Depends(get_tenant_id).
|
||||
DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
|
||||
|
||||
VALID_STATUSES = {"draft", "in-review", "approved", "needs-update"}
|
||||
VALID_RISK_LEVELS = {"low", "medium", "high", "critical"}
|
||||
@@ -909,3 +913,33 @@ async def export_dsfa_json(
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Versioning
|
||||
# =============================================================================
|
||||
|
||||
@router.get("/{dsfa_id}/versions")
async def list_dsfa_versions(
    dsfa_id: str,
    tenant_id: Optional[str] = Query(None),
    db: Session = Depends(get_db),
):
    """List all versions for a DSFA."""
    # Local import avoids a circular dependency at module load time.
    from .versioning_utils import list_versions

    resolved_tenant = _get_tenant_id(tenant_id)
    return list_versions(db, "dsfa", dsfa_id, resolved_tenant)
|
||||
|
||||
|
||||
@router.get("/{dsfa_id}/versions/{version_number}")
async def get_dsfa_version(
    dsfa_id: str,
    version_number: int,
    tenant_id: Optional[str] = Query(None),
    db: Session = Depends(get_db),
):
    """Get a specific DSFA version with full snapshot."""
    # Local import avoids a circular dependency at module load time.
    from .versioning_utils import get_version

    resolved_tenant = _get_tenant_id(tenant_id)
    snapshot = get_version(db, "dsfa", dsfa_id, version_number, resolved_tenant)
    if not snapshot:
        raise HTTPException(status_code=404, detail=f"Version {version_number} not found")
    return snapshot
|
||||
|
||||
backend-compliance/compliance/api/generation_routes.py — new file, 186 lines
@@ -0,0 +1,186 @@
|
||||
"""
|
||||
FastAPI routes for Document Generation from Stammdaten.
|
||||
|
||||
Endpoints:
|
||||
GET /generation/preview/{doc_type} — Markdown preview from Stammdaten
|
||||
POST /generation/apply/{doc_type} — Generate drafts → create Change-Requests
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from classroom_engine.database import get_db
|
||||
from .tenant_utils import get_tenant_id
|
||||
from .document_templates import (
|
||||
generate_dsfa_draft,
|
||||
generate_vvt_drafts,
|
||||
generate_loeschfristen_drafts,
|
||||
generate_tom_drafts,
|
||||
generate_obligation_drafts,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/generation", tags=["generation"])
|
||||
|
||||
VALID_DOC_TYPES = {"dsfa", "vvt", "tom", "loeschfristen", "obligation"}
|
||||
|
||||
|
||||
def _get_template_context(db, tid: str) -> Optional[dict]:
    """Fetch the tenant's company profile and build a flat template context.

    Args:
        db: Request-scoped session (currently unused — see note below).
        tid: Tenant id string.

    Returns:
        Flat context dict for the template generators, or None when the
        tenant has no company profile yet.
    """
    from .company_profile_routes import _BASE_COLUMNS, row_to_response
    from database import SessionLocal

    # Use a fresh session for company_profile (different DB import pattern),
    # so the passed-in `db` is deliberately not reused here.
    cp_db = SessionLocal()
    try:
        # text() keeps this compatible with SQLAlchemy 2.0, which no longer
        # accepts plain SQL strings in execute() — and matches the style used
        # elsewhere in this module.
        result = cp_db.execute(
            text(f"SELECT {_BASE_COLUMNS} FROM compliance_company_profiles WHERE tenant_id = :tid"),
            {"tid": tid},
        )
        row = result.fetchone()
        if not row:
            return None
        resp = row_to_response(row)
        # Build flat context
        return {
            "company_name": resp.company_name,
            "legal_form": resp.legal_form,
            "industry": resp.industry,
            "business_model": resp.business_model,
            "company_size": resp.company_size,
            "employee_count": resp.employee_count,
            "headquarters_country": resp.headquarters_country,
            "headquarters_city": resp.headquarters_city,
            "primary_jurisdiction": resp.primary_jurisdiction,
            "is_data_controller": resp.is_data_controller,
            "is_data_processor": resp.is_data_processor,
            "uses_ai": resp.uses_ai,
            "dpo_name": resp.dpo_name or "",
            "dpo_email": resp.dpo_email or "",
            "supervisory_authority": resp.supervisory_authority or "",
            "review_cycle_months": resp.review_cycle_months,
            "subject_to_nis2": resp.subject_to_nis2,
            "subject_to_ai_act": resp.subject_to_ai_act,
            "subject_to_iso27001": resp.subject_to_iso27001,
            "offerings": resp.offerings,
            "target_markets": resp.target_markets,
            "ai_use_cases": resp.ai_use_cases,
            "repos": resp.repos,
            "document_sources": resp.document_sources,
            "processing_systems": resp.processing_systems,
            "ai_systems": resp.ai_systems,
            "technical_contacts": resp.technical_contacts,
            "has_ai_systems": len(resp.ai_systems) > 0,
            "processing_system_count": len(resp.processing_systems),
            "ai_system_count": len(resp.ai_systems),
            "is_complete": resp.is_complete,
        }
    finally:
        cp_db.close()
|
||||
|
||||
|
||||
def _generate_for_type(doc_type: str, ctx: dict):
    """Call the appropriate template generator for *doc_type*."""
    # Dispatch table instead of an if/elif chain; the DSFA generator returns
    # a single dict, so it is wrapped to keep a uniform list-of-drafts shape.
    generators = {
        "dsfa": lambda c: [generate_dsfa_draft(c)],
        "vvt": generate_vvt_drafts,
        "tom": generate_tom_drafts,
        "loeschfristen": generate_loeschfristen_drafts,
        "obligation": generate_obligation_drafts,
    }
    if doc_type not in generators:
        raise ValueError(f"Unknown doc_type: {doc_type}")
    return generators[doc_type](ctx)
|
||||
|
||||
|
||||
@router.get("/preview/{doc_type}")
async def preview_generation(
    doc_type: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """Preview what documents would be generated (no DB writes)."""
    if doc_type not in VALID_DOC_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid doc_type: {doc_type}. Valid: {VALID_DOC_TYPES}")

    ctx = _get_template_context(db, tid)
    if not ctx:
        raise HTTPException(status_code=404, detail="Company profile not found — fill Stammdaten first")

    # Generate in memory only; nothing is persisted for a preview.
    drafts = _generate_for_type(doc_type, ctx)
    return {
        "doc_type": doc_type,
        "count": len(drafts),
        "drafts": drafts,
        "company_name": ctx.get("company_name"),
        "is_preview": True,
    }
|
||||
|
||||
|
||||
@router.post("/apply/{doc_type}")
async def apply_generation(
    doc_type: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
    x_user_id: Optional[str] = Header(None, alias="X-User-ID"),
):
    """Generate drafts and create Change-Requests for each.

    Does NOT create documents directly — all go through the CR inbox.

    Args:
        doc_type: One of VALID_DOC_TYPES.
        tid: Tenant id resolved by the shared tenant dependency.
        db: Request-scoped session used for the CR inserts.
        x_user_id: Optional acting user recorded as created_by; defaults to "system".

    Returns:
        Summary dict with the number of drafts generated and the created CR ids.

    Raises:
        HTTPException: 400 for an unknown doc_type, 404 when the tenant has
            no company profile yet.
    """
    if doc_type not in VALID_DOC_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid doc_type: {doc_type}. Valid: {VALID_DOC_TYPES}")

    ctx = _get_template_context(db, tid)
    if not ctx:
        raise HTTPException(status_code=404, detail="Company profile not found — fill Stammdaten first")

    drafts = _generate_for_type(doc_type, ctx)
    user = x_user_id or "system"

    cr_ids = []
    for draft in drafts:
        # Best human-readable title the draft offers; falls back per doc type.
        title = draft.get("title") or draft.get("name") or draft.get("data_category") or f"Neues {doc_type}-Dokument"
        # Best-effort: one failing draft must not block the remaining CRs.
        try:
            result = db.execute(
                text("""
                INSERT INTO compliance_change_requests
                (tenant_id, trigger_type, target_document_type,
                 proposal_title, proposal_body, proposed_changes,
                 priority, created_by)
                VALUES (:tid, 'generation', :doc_type,
                        :title, :body, CAST(:changes AS jsonb),
                        'normal', :user)
                RETURNING id
                """),
                {
                    "tid": tid,
                    "doc_type": doc_type,
                    "title": f"[Generiert] {title}",
                    "body": f"Automatisch aus Stammdaten generiert für {ctx.get('company_name', '')}",
                    "changes": json.dumps(draft),
                    "user": user,
                },
            )
            row = result.fetchone()
            if row:
                cr_ids.append(str(row[0]))
        except Exception:
            # logger.exception records the traceback; lazy %-style args avoid
            # eager f-string formatting inside the handler.
            logger.exception("Failed to create CR for %s draft", doc_type)

    db.commit()

    return {
        "doc_type": doc_type,
        "drafts_generated": len(drafts),
        "change_requests_created": len(cr_ids),
        "change_request_ids": cr_ids,
    }
|
||||
@@ -352,3 +352,35 @@ async def delete_loeschfrist(
|
||||
db.commit()
|
||||
if result.rowcount == 0:
|
||||
raise HTTPException(status_code=404, detail="Loeschfrist not found")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Versioning
|
||||
# =============================================================================
|
||||
|
||||
@router.get("/{policy_id}/versions")
async def list_loeschfristen_versions(
    policy_id: str,
    db: Session = Depends(get_db),
    x_tenant_id: Optional[str] = Header(None),
):
    """Return the version history of a Loeschfrist, newest first."""
    from .versioning_utils import list_versions

    tenant = _get_tenant_id(x_tenant_id)
    return list_versions(db, "loeschfristen", policy_id, tenant)
|
||||
|
||||
|
||||
@router.get("/{policy_id}/versions/{version_number}")
async def get_loeschfristen_version(
    policy_id: str,
    version_number: int,
    db: Session = Depends(get_db),
    x_tenant_id: Optional[str] = Header(None),
):
    """Fetch one Loeschfristen version, including its full snapshot."""
    from .versioning_utils import get_version

    tenant = _get_tenant_id(x_tenant_id)
    version = get_version(db, "loeschfristen", policy_id, version_number, tenant)
    if version is None:
        raise HTTPException(status_code=404, detail=f"Version {version_number} not found")
    return version
|
||||
|
||||
@@ -324,3 +324,35 @@ async def delete_obligation(
|
||||
db.commit()
|
||||
if result.rowcount == 0:
|
||||
raise HTTPException(status_code=404, detail="Obligation not found")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Versioning
|
||||
# =============================================================================
|
||||
|
||||
@router.get("/{obligation_id}/versions")
async def list_obligation_versions(
    obligation_id: str,
    db: Session = Depends(get_db),
    x_tenant_id: Optional[str] = Header(None),
):
    """Return the version history of an Obligation, newest first."""
    from .versioning_utils import list_versions

    tenant = _get_tenant_id(x_tenant_id)
    return list_versions(db, "obligation", obligation_id, tenant)
|
||||
|
||||
|
||||
@router.get("/{obligation_id}/versions/{version_number}")
async def get_obligation_version(
    obligation_id: str,
    version_number: int,
    db: Session = Depends(get_db),
    x_tenant_id: Optional[str] = Header(None),
):
    """Fetch one Obligation version, including its full snapshot."""
    from .versioning_utils import get_version

    tenant = _get_tenant_id(x_tenant_id)
    version = get_version(db, "obligation", obligation_id, version_number, tenant)
    if version is None:
        raise HTTPException(status_code=404, detail=f"Version {version_number} not found")
    return version
|
||||
|
||||
58
backend-compliance/compliance/api/tenant_utils.py
Normal file
58
backend-compliance/compliance/api/tenant_utils.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""
|
||||
Shared tenant middleware for all Compliance API routes.
|
||||
|
||||
Provides a central FastAPI dependency that resolves tenant_id from:
|
||||
1. X-Tenant-ID header (primary)
|
||||
2. Query parameter tenant_id (fallback)
|
||||
3. Environment variable DEFAULT_TENANT_ID (last resort)
|
||||
|
||||
UUID validation ensures no more "default" strings leak through.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Header, Query, HTTPException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Fallback for local development — real deployments must pass X-Tenant-ID
# (or ?tenant_id=) explicitly; this env-derived UUID is only the last resort.
_ENV_DEFAULT = os.getenv(
    "DEFAULT_TENANT_ID", "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
)

# Canonical 8-4-4-4-12 hex UUID shape; re.I also accepts uppercase digits.
_UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.I
)
|
||||
|
||||
|
||||
def _validate_tenant_id(tid: str) -> str:
    """Reject any tenant identifier that is not a UUID string.

    The legacy literal 'default' gets its own, more actionable error.
    Returns the identifier unchanged when it passes validation.
    """
    if tid == "default":
        raise HTTPException(
            status_code=400,
            detail="Tenant ID 'default' is no longer accepted. Pass a valid UUID.",
        )
    if _UUID_RE.match(tid) is None:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid tenant_id format: '{tid}'. Must be a UUID.",
        )
    return tid
|
||||
|
||||
|
||||
async def get_tenant_id(
    x_tenant_id: Optional[str] = Header(None, alias="X-Tenant-ID"),
    tenant_id: Optional[str] = Query(None),
) -> str:
    """FastAPI dependency — resolves + validates the tenant ID.

    Resolution order: X-Tenant-ID header, then ?tenant_id= query parameter,
    then the module-level environment fallback.

    Usage:
        @router.get("/something")
        async def my_endpoint(tid: str = Depends(get_tenant_id)):
            ...
    """
    if x_tenant_id:
        candidate = x_tenant_id
    elif tenant_id:
        candidate = tenant_id
    else:
        candidate = _ENV_DEFAULT
    return _validate_tenant_id(candidate)
|
||||
@@ -573,3 +573,37 @@ async def export_measures(
|
||||
media_type="text/csv; charset=utf-8",
|
||||
headers={"Content-Disposition": "attachment; filename=tom_export.csv"},
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Versioning
|
||||
# =============================================================================
|
||||
|
||||
@router.get("/measures/{measure_id}/versions")
async def list_measure_versions(
    measure_id: str,
    tenant_id: Optional[str] = Query(None, alias="tenant_id"),
    tenantId: Optional[str] = Query(None, alias="tenantId"),
    db: Session = Depends(get_db),
):
    """Return the version history of a TOM measure, newest first."""
    from .versioning_utils import list_versions

    # Accept both snake_case and camelCase query params; fall back to the
    # module-level default tenant for local development.
    resolved = tenant_id if tenant_id else (tenantId or DEFAULT_TENANT_ID)
    return list_versions(db, "tom", measure_id, resolved)
|
||||
|
||||
|
||||
@router.get("/measures/{measure_id}/versions/{version_number}")
async def get_measure_version(
    measure_id: str,
    version_number: int,
    tenant_id: Optional[str] = Query(None, alias="tenant_id"),
    tenantId: Optional[str] = Query(None, alias="tenantId"),
    db: Session = Depends(get_db),
):
    """Fetch one TOM measure version, including its full snapshot."""
    from .versioning_utils import get_version

    # Accept both snake_case and camelCase query params; fall back to the
    # module-level default tenant for local development.
    resolved = tenant_id if tenant_id else (tenantId or DEFAULT_TENANT_ID)
    version = get_version(db, "tom", measure_id, version_number, resolved)
    if version is None:
        raise HTTPException(status_code=404, detail=f"Version {version_number} not found")
    return version
|
||||
|
||||
@@ -62,7 +62,8 @@ from classroom_engine.database import get_db
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/vendor-compliance", tags=["vendor-compliance"])
|
||||
|
||||
DEFAULT_TENANT_ID = "default"
|
||||
# Default tenant UUID — "default" string no longer accepted
|
||||
DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
|
||||
|
||||
# =============================================================================
|
||||
# Helpers
|
||||
|
||||
175
backend-compliance/compliance/api/versioning_utils.py
Normal file
175
backend-compliance/compliance/api/versioning_utils.py
Normal file
@@ -0,0 +1,175 @@
|
||||
"""
|
||||
Shared versioning utilities for all compliance document types.
|
||||
|
||||
Provides create_version_snapshot() and list_versions() helpers that work
|
||||
with all 5 version tables (DSFA, VVT, TOM, Loeschfristen, Obligations).
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# doc_type -> (version table, FK column pointing at the source row, source table).
# The source table is the one whose current_version pointer gets bumped when a
# snapshot is created; table names here act as a whitelist for the f-string SQL
# in the helpers below.
VERSION_TABLES = {
    "dsfa": ("compliance_dsfa_versions", "dsfa_id", "compliance_dsfas"),
    "vvt_activity": ("compliance_vvt_activity_versions", "activity_id", "compliance_vvt_activities"),
    "tom": ("compliance_tom_versions", "measure_id", "compliance_tom_measures"),
    "loeschfristen": ("compliance_loeschfristen_versions", "policy_id", "compliance_loeschfristen"),
    "obligation": ("compliance_obligation_versions", "obligation_id", "compliance_obligations"),
}
|
||||
|
||||
|
||||
def create_version_snapshot(
    db: Session,
    doc_type: str,
    doc_id: str,
    tenant_id: str,
    snapshot: dict,
    change_summary: str = "",
    changed_sections: Optional[list] = None,
    created_by: str = "system",
) -> dict:
    """Create a new version snapshot for any document type.

    Args:
        db: Active SQLAlchemy session (caller is responsible for commit).
        doc_type: One of "dsfa", "vvt_activity", "tom", "loeschfristen", "obligation"
        doc_id: UUID of the source document
        tenant_id: Tenant UUID
        snapshot: Full JSONB snapshot of the document state
        change_summary: Human-readable summary of changes
        changed_sections: List of section identifiers that changed
        created_by: User who created this version

    Returns:
        Dict with version info (id, version_number, created_at)

    Raises:
        ValueError: If doc_type is not a known document type.
    """
    if doc_type not in VERSION_TABLES:
        raise ValueError(f"Unknown document type: {doc_type}")

    # Table/column names come from the VERSION_TABLES whitelist, so the
    # f-string SQL below is injection-safe; all values use bound parameters.
    version_table, fk_column, source_table = VERSION_TABLES[doc_type]

    # Next version number, tenant-scoped for consistency with list_versions()
    # and get_version(), so a mismatched tenant_id cannot continue another
    # tenant's version sequence.
    # NOTE(review): read-then-insert is racy under concurrent writers for the
    # same document — assumes writes per document are serialized; confirm.
    result = db.execute(
        text(
            f"SELECT COALESCE(MAX(version_number), 0) FROM {version_table} "
            f"WHERE {fk_column} = :doc_id AND tenant_id = :tenant_id"
        ),
        {"doc_id": doc_id, "tenant_id": tenant_id},
    )
    next_version = result.scalar() + 1

    # Insert the version row; JSON payloads are serialized here and cast to
    # jsonb server-side.
    result = db.execute(
        text(f"""
            INSERT INTO {version_table}
                ({fk_column}, tenant_id, version_number, snapshot, change_summary,
                 changed_sections, created_by)
            VALUES (:doc_id, :tenant_id, :version_number, CAST(:snapshot AS jsonb),
                    :change_summary, CAST(:changed_sections AS jsonb), :created_by)
            RETURNING id, version_number, created_at
        """),
        {
            "doc_id": doc_id,
            "tenant_id": tenant_id,
            "version_number": next_version,
            "snapshot": json.dumps(snapshot),
            "change_summary": change_summary,
            "changed_sections": json.dumps(changed_sections or []),
            "created_by": created_by,
        },
    )
    row = result.fetchone()

    # Keep the source document's current_version pointer in sync. The
    # tenant_id guard prevents a caller with a mismatched tenant from
    # touching another tenant's row.
    db.execute(
        text(
            f"UPDATE {source_table} SET current_version = :v "
            f"WHERE id = :doc_id AND tenant_id = :tenant_id"
        ),
        {"v": next_version, "doc_id": doc_id, "tenant_id": tenant_id},
    )

    return {
        "id": str(row[0]),
        "version_number": row[1],
        "created_at": row[2].isoformat() if row[2] else None,
    }
|
||||
|
||||
|
||||
def list_versions(
    db: Session,
    doc_type: str,
    doc_id: str,
    tenant_id: str,
) -> List[dict]:
    """List all versions for a document, newest first (snapshots excluded)."""
    if doc_type not in VERSION_TABLES:
        raise ValueError(f"Unknown document type: {doc_type}")

    version_table, fk_column, _ = VERSION_TABLES[doc_type]

    rows = db.execute(
        text(f"""
            SELECT id, version_number, status, change_summary, changed_sections,
                   created_by, approved_by, approved_at, created_at
            FROM {version_table}
            WHERE {fk_column} = :doc_id AND tenant_id = :tenant_id
            ORDER BY version_number DESC
        """),
        {"doc_id": doc_id, "tenant_id": tenant_id},
    ).fetchall()

    versions: List[dict] = []
    for row in rows:
        versions.append(
            {
                "id": str(row[0]),
                "version_number": row[1],
                "status": row[2],
                "change_summary": row[3],
                "changed_sections": row[4] or [],
                "created_by": row[5],
                "approved_by": row[6],
                "approved_at": row[7].isoformat() if row[7] else None,
                "created_at": row[8].isoformat() if row[8] else None,
            }
        )
    return versions
|
||||
|
||||
|
||||
def get_version(
    db: Session,
    doc_type: str,
    doc_id: str,
    version_number: int,
    tenant_id: str,
) -> Optional[dict]:
    """Return a single version (with full snapshot), or None if missing."""
    if doc_type not in VERSION_TABLES:
        raise ValueError(f"Unknown document type: {doc_type}")

    version_table, fk_column, _ = VERSION_TABLES[doc_type]

    record = db.execute(
        text(f"""
            SELECT id, version_number, status, snapshot, change_summary,
                   changed_sections, created_by, approved_by, approved_at, created_at
            FROM {version_table}
            WHERE {fk_column} = :doc_id AND version_number = :v AND tenant_id = :tenant_id
        """),
        {"doc_id": doc_id, "v": version_number, "tenant_id": tenant_id},
    ).fetchone()

    if record is None:
        return None

    approved_at = record[8].isoformat() if record[8] else None
    created_at = record[9].isoformat() if record[9] else None
    return {
        "id": str(record[0]),
        "version_number": record[1],
        "status": record[2],
        "snapshot": record[3],
        "change_summary": record[4],
        "changed_sections": record[5] or [],
        "created_by": record[6],
        "approved_by": record[7],
        "approved_at": approved_at,
        "created_at": created_at,
    }
|
||||
@@ -33,6 +33,7 @@ from .schemas import (
|
||||
VVTActivityCreate, VVTActivityUpdate, VVTActivityResponse,
|
||||
VVTStatsResponse, VVTAuditLogEntry,
|
||||
)
|
||||
from .tenant_utils import get_tenant_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/vvt", tags=["compliance-vvt"])
|
||||
@@ -40,6 +41,7 @@ router = APIRouter(prefix="/vvt", tags=["compliance-vvt"])
|
||||
|
||||
def _log_audit(
|
||||
db: Session,
|
||||
tenant_id: str,
|
||||
action: str,
|
||||
entity_type: str,
|
||||
entity_id=None,
|
||||
@@ -48,6 +50,7 @@ def _log_audit(
|
||||
new_values=None,
|
||||
):
|
||||
entry = VVTAuditLogDB(
|
||||
tenant_id=tenant_id,
|
||||
action=action,
|
||||
entity_type=entity_type,
|
||||
entity_id=entity_id,
|
||||
@@ -63,9 +66,17 @@ def _log_audit(
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/organization", response_model=Optional[VVTOrganizationResponse])
|
||||
async def get_organization(db: Session = Depends(get_db)):
|
||||
"""Load the VVT organization header (single record)."""
|
||||
org = db.query(VVTOrganizationDB).order_by(VVTOrganizationDB.created_at).first()
|
||||
async def get_organization(
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Load the VVT organization header for the given tenant."""
|
||||
org = (
|
||||
db.query(VVTOrganizationDB)
|
||||
.filter(VVTOrganizationDB.tenant_id == tid)
|
||||
.order_by(VVTOrganizationDB.created_at)
|
||||
.first()
|
||||
)
|
||||
if not org:
|
||||
return None
|
||||
return VVTOrganizationResponse(
|
||||
@@ -88,15 +99,22 @@ async def get_organization(db: Session = Depends(get_db)):
|
||||
@router.put("/organization", response_model=VVTOrganizationResponse)
|
||||
async def upsert_organization(
|
||||
request: VVTOrganizationUpdate,
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Create or update the VVT organization header."""
|
||||
org = db.query(VVTOrganizationDB).order_by(VVTOrganizationDB.created_at).first()
|
||||
org = (
|
||||
db.query(VVTOrganizationDB)
|
||||
.filter(VVTOrganizationDB.tenant_id == tid)
|
||||
.order_by(VVTOrganizationDB.created_at)
|
||||
.first()
|
||||
)
|
||||
|
||||
if not org:
|
||||
data = request.dict(exclude_none=True)
|
||||
if 'organization_name' not in data:
|
||||
data['organization_name'] = 'Meine Organisation'
|
||||
data['tenant_id'] = tid
|
||||
org = VVTOrganizationDB(**data)
|
||||
db.add(org)
|
||||
else:
|
||||
@@ -168,10 +186,11 @@ async def list_activities(
|
||||
business_function: Optional[str] = Query(None),
|
||||
search: Optional[str] = Query(None),
|
||||
review_overdue: Optional[bool] = Query(None),
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""List all processing activities with optional filters."""
|
||||
query = db.query(VVTActivityDB)
|
||||
query = db.query(VVTActivityDB).filter(VVTActivityDB.tenant_id == tid)
|
||||
|
||||
if status:
|
||||
query = query.filter(VVTActivityDB.status == status)
|
||||
@@ -199,12 +218,14 @@ async def list_activities(
|
||||
async def create_activity(
|
||||
request: VVTActivityCreate,
|
||||
http_request: Request,
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Create a new processing activity."""
|
||||
# Check for duplicate vvt_id
|
||||
# Check for duplicate vvt_id within tenant
|
||||
existing = db.query(VVTActivityDB).filter(
|
||||
VVTActivityDB.vvt_id == request.vvt_id
|
||||
VVTActivityDB.tenant_id == tid,
|
||||
VVTActivityDB.vvt_id == request.vvt_id,
|
||||
).first()
|
||||
if existing:
|
||||
raise HTTPException(
|
||||
@@ -213,6 +234,7 @@ async def create_activity(
|
||||
)
|
||||
|
||||
data = request.dict()
|
||||
data['tenant_id'] = tid
|
||||
# Set created_by from X-User-ID header if not provided in body
|
||||
if not data.get('created_by'):
|
||||
data['created_by'] = http_request.headers.get('X-User-ID', 'system')
|
||||
@@ -223,6 +245,7 @@ async def create_activity(
|
||||
|
||||
_log_audit(
|
||||
db,
|
||||
tenant_id=tid,
|
||||
action="CREATE",
|
||||
entity_type="activity",
|
||||
entity_id=act.id,
|
||||
@@ -235,9 +258,16 @@ async def create_activity(
|
||||
|
||||
|
||||
@router.get("/activities/{activity_id}", response_model=VVTActivityResponse)
|
||||
async def get_activity(activity_id: str, db: Session = Depends(get_db)):
|
||||
async def get_activity(
|
||||
activity_id: str,
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Get a single processing activity by ID."""
|
||||
act = db.query(VVTActivityDB).filter(VVTActivityDB.id == activity_id).first()
|
||||
act = db.query(VVTActivityDB).filter(
|
||||
VVTActivityDB.id == activity_id,
|
||||
VVTActivityDB.tenant_id == tid,
|
||||
).first()
|
||||
if not act:
|
||||
raise HTTPException(status_code=404, detail=f"Activity {activity_id} not found")
|
||||
return _activity_to_response(act)
|
||||
@@ -247,10 +277,14 @@ async def get_activity(activity_id: str, db: Session = Depends(get_db)):
|
||||
async def update_activity(
|
||||
activity_id: str,
|
||||
request: VVTActivityUpdate,
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Update a processing activity."""
|
||||
act = db.query(VVTActivityDB).filter(VVTActivityDB.id == activity_id).first()
|
||||
act = db.query(VVTActivityDB).filter(
|
||||
VVTActivityDB.id == activity_id,
|
||||
VVTActivityDB.tenant_id == tid,
|
||||
).first()
|
||||
if not act:
|
||||
raise HTTPException(status_code=404, detail=f"Activity {activity_id} not found")
|
||||
|
||||
@@ -262,6 +296,7 @@ async def update_activity(
|
||||
|
||||
_log_audit(
|
||||
db,
|
||||
tenant_id=tid,
|
||||
action="UPDATE",
|
||||
entity_type="activity",
|
||||
entity_id=act.id,
|
||||
@@ -275,14 +310,22 @@ async def update_activity(
|
||||
|
||||
|
||||
@router.delete("/activities/{activity_id}")
|
||||
async def delete_activity(activity_id: str, db: Session = Depends(get_db)):
|
||||
async def delete_activity(
|
||||
activity_id: str,
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Delete a processing activity."""
|
||||
act = db.query(VVTActivityDB).filter(VVTActivityDB.id == activity_id).first()
|
||||
act = db.query(VVTActivityDB).filter(
|
||||
VVTActivityDB.id == activity_id,
|
||||
VVTActivityDB.tenant_id == tid,
|
||||
).first()
|
||||
if not act:
|
||||
raise HTTPException(status_code=404, detail=f"Activity {activity_id} not found")
|
||||
|
||||
_log_audit(
|
||||
db,
|
||||
tenant_id=tid,
|
||||
action="DELETE",
|
||||
entity_type="activity",
|
||||
entity_id=act.id,
|
||||
@@ -302,11 +345,13 @@ async def delete_activity(activity_id: str, db: Session = Depends(get_db)):
|
||||
async def get_audit_log(
|
||||
limit: int = Query(50, ge=1, le=500),
|
||||
offset: int = Query(0, ge=0),
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Get the VVT audit trail."""
|
||||
entries = (
|
||||
db.query(VVTAuditLogDB)
|
||||
.filter(VVTAuditLogDB.tenant_id == tid)
|
||||
.order_by(VVTAuditLogDB.created_at.desc())
|
||||
.offset(offset)
|
||||
.limit(limit)
|
||||
@@ -334,14 +379,26 @@ async def get_audit_log(
|
||||
@router.get("/export")
|
||||
async def export_activities(
|
||||
format: str = Query("json", pattern="^(json|csv)$"),
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Export all activities as JSON or CSV (semicolon-separated, DE locale)."""
|
||||
org = db.query(VVTOrganizationDB).order_by(VVTOrganizationDB.created_at).first()
|
||||
activities = db.query(VVTActivityDB).order_by(VVTActivityDB.created_at).all()
|
||||
org = (
|
||||
db.query(VVTOrganizationDB)
|
||||
.filter(VVTOrganizationDB.tenant_id == tid)
|
||||
.order_by(VVTOrganizationDB.created_at)
|
||||
.first()
|
||||
)
|
||||
activities = (
|
||||
db.query(VVTActivityDB)
|
||||
.filter(VVTActivityDB.tenant_id == tid)
|
||||
.order_by(VVTActivityDB.created_at)
|
||||
.all()
|
||||
)
|
||||
|
||||
_log_audit(
|
||||
db,
|
||||
tenant_id=tid,
|
||||
action="EXPORT",
|
||||
entity_type="all_activities",
|
||||
new_values={"count": len(activities), "format": format},
|
||||
@@ -432,9 +489,12 @@ def _export_csv(activities: list) -> StreamingResponse:
|
||||
|
||||
|
||||
@router.get("/stats", response_model=VVTStatsResponse)
|
||||
async def get_stats(db: Session = Depends(get_db)):
|
||||
async def get_stats(
|
||||
tid: str = Depends(get_tenant_id),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Get VVT statistics summary."""
|
||||
activities = db.query(VVTActivityDB).all()
|
||||
activities = db.query(VVTActivityDB).filter(VVTActivityDB.tenant_id == tid).all()
|
||||
|
||||
by_status: dict = {}
|
||||
by_bf: dict = {}
|
||||
@@ -459,3 +519,33 @@ async def get_stats(db: Session = Depends(get_db)):
|
||||
approved_count=by_status.get('APPROVED', 0),
|
||||
overdue_review_count=overdue_count,
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Versioning
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/activities/{activity_id}/versions")
async def list_activity_versions(
    activity_id: str,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """Return the version history of a VVT activity, newest first."""
    from .versioning_utils import list_versions

    versions = list_versions(db, "vvt_activity", activity_id, tid)
    return versions
|
||||
|
||||
|
||||
@router.get("/activities/{activity_id}/versions/{version_number}")
async def get_activity_version(
    activity_id: str,
    version_number: int,
    tid: str = Depends(get_tenant_id),
    db: Session = Depends(get_db),
):
    """Fetch one VVT activity version, including its full snapshot."""
    from .versioning_utils import get_version

    version = get_version(db, "vvt_activity", activity_id, version_number, tid)
    if version is None:
        raise HTTPException(status_code=404, detail=f"Version {version_number} not found")
    return version
|
||||
|
||||
Reference in New Issue
Block a user