diff --git a/admin-compliance/app/sdk/audit-timeline/_hooks/useAuditTimeline.ts b/admin-compliance/app/sdk/audit-timeline/_hooks/useAuditTimeline.ts
new file mode 100644
index 0000000..924d8ab
--- /dev/null
+++ b/admin-compliance/app/sdk/audit-timeline/_hooks/useAuditTimeline.ts
@@ -0,0 +1,46 @@
+'use client'
+
+import { useState, useEffect } from 'react'
+
+// One row of the compliance audit trail as returned by the SDK API.
+export interface AuditEntry {
+  id: string
+  entity_type: string
+  entity_id: string
+  entity_name: string
+  action: string
+  field_changed: string | null
+  old_value: string | null
+  new_value: string | null
+  change_summary: string | null
+  performed_by: string
+  performed_at: string
+}
+
+// Loads audit-trail entries, reloading whenever the entity-type filter changes.
+export function useAuditTimeline() {
+  const [entries, setEntries] = useState<AuditEntry[]>([])
+  const [loading, setLoading] = useState(true)
+  const [filter, setFilter] = useState('all')
+
+  useEffect(() => {
+    loadEntries()
+  }, [filter]) // eslint-disable-line react-hooks/exhaustive-deps
+
+  async function loadEntries() {
+    setLoading(true)
+    try {
+      const params = new URLSearchParams({ limit: '100' })
+      if (filter !== 'all') params.set('entity_type', filter)
+      const res = await fetch(`/api/sdk/v1/compliance/audit-trail?${params}`)
+      if (res.ok) {
+        const json = await res.json()
+        setEntries(Array.isArray(json) ? json : json.entries || json.audit_trail || [])
+      }
+    } catch (err) {
+      console.error('Failed to load audit trail:', err)
+    } finally {
+      setLoading(false)
+    }
+  }
+
+  return { entries, loading, filter, setFilter }
+}
diff --git a/admin-compliance/app/sdk/audit-timeline/page.tsx b/admin-compliance/app/sdk/audit-timeline/page.tsx
new file mode 100644
index 0000000..0c62d37
--- /dev/null
+++ b/admin-compliance/app/sdk/audit-timeline/page.tsx
@@ -0,0 +1,117 @@
+'use client'
+
+import { useAuditTimeline, type AuditEntry } from './_hooks/useAuditTimeline'
+
+const ENTITY_LABELS: Record<string, string> = {
+  evidence: 'Nachweis', control: 'Control', document: 'Dokument',
+  dsfa: 'DSFA', vvt: 'VVT', tom: 'TOM', policy: 'Richtlinie',
+  dsms_archive: 'DSMS-Archiv', risk:
'Risiko',
+}
+
+const ACTION_COLORS: Record<string, string> = {
+  create: 'bg-green-500', update: 'bg-blue-500', delete: 'bg-red-500',
+  approve: 'bg-purple-500', archive: 'bg-emerald-500', review: 'bg-yellow-500',
+  sign: 'bg-indigo-500', reject: 'bg-red-400',
+}
+
+const FILTER_OPTIONS = ['all', 'evidence', 'dsms_archive', 'control', 'document', 'dsfa', 'vvt', 'tom']
+
+export default function AuditTimelinePage() {
+  const { entries, loading, filter, setFilter } = useAuditTimeline()
+
+  return (
+
+
+

Audit Timeline

+

Chronologische Compliance-Historie mit DSMS-Nachweisen

+
+ + {/* Filter */} +
+ {FILTER_OPTIONS.map((f) => ( + + ))} +
+ + {loading ? ( +
+
+
+ ) : entries.length === 0 ? ( +
+ Keine Eintraege gefunden. Compliance-Aktionen werden automatisch protokolliert. +
+ ) : ( +
+ {/* Timeline line */} +
+ +
+ {entries.map((entry) => ( + + ))} +
+
+ )} +
+ ) +} + +function TimelineEntry({ entry }: { entry: AuditEntry }) { + const dotColor = ACTION_COLORS[entry.action] || 'bg-gray-400' + const isCID = entry.field_changed === 'dsms_cid' || entry.action === 'archive' + const date = new Date(entry.performed_at) + + return ( +
+ {/* Dot */} +
+ + {/* Content */} +
+
+
+
+ {entry.entity_name} + + {ENTITY_LABELS[entry.entity_type] || entry.entity_type} + + + {entry.action} + +
+ {entry.change_summary && ( +

{entry.change_summary}

+ )} + {isCID && entry.new_value && ( +
+ + + + + {entry.new_value.length > 20 ? entry.new_value.slice(0, 8) + '...' + entry.new_value.slice(-6) : entry.new_value} + + DSMS/IPFS +
+ )} +
+
+
{date.toLocaleDateString('de-DE')}
+
{date.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit' })}
+
{entry.performed_by}
+
+
+
+
+ ) +} diff --git a/ai-compliance-sdk/internal/api/handlers/iace_handler_techfile.go b/ai-compliance-sdk/internal/api/handlers/iace_handler_techfile.go index 41452b9..909a8b3 100644 --- a/ai-compliance-sdk/internal/api/handlers/iace_handler_techfile.go +++ b/ai-compliance-sdk/internal/api/handlers/iace_handler_techfile.go @@ -5,6 +5,7 @@ import ( "net/http" "strings" + "github.com/breakpilot/ai-compliance-sdk/internal/dsms" "github.com/breakpilot/ai-compliance-sdk/internal/iace" "github.com/breakpilot/ai-compliance-sdk/internal/rbac" "github.com/gin-gonic/gin" @@ -411,6 +412,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) { c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("PDF export failed: %v", err)}) return } + archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.pdf", safeName), projectID.String()) c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.pdf"`, safeName)) c.Data(http.StatusOK, "application/pdf", data) @@ -420,6 +422,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) { c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Excel export failed: %v", err)}) return } + archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.xlsx", safeName), projectID.String()) c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.xlsx"`, safeName)) c.Data(http.StatusOK, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", data) @@ -429,6 +432,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) { c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("DOCX export failed: %v", err)}) return } + archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.docx", safeName), projectID.String()) c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.docx"`, safeName)) c.Data(http.StatusOK, "application/vnd.openxmlformats-officedocument.wordprocessingml.document", data) @@ -438,6 +442,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) { 
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Markdown export failed: %v", err)}) return } + archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.md", safeName), projectID.String()) c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.md"`, safeName)) c.Data(http.StatusOK, "text/markdown", data) @@ -462,3 +467,8 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) { }) } } + +// archiveTechFile stores a tech-file export to DSMS (best-effort, non-blocking). +func archiveTechFile(data []byte, filename, projectID string) { + dsms.Archive(data, filename, "ce_techfile", projectID, "1") +} diff --git a/backend-compliance/compliance/api/evidence_routes.py b/backend-compliance/compliance/api/evidence_routes.py index 9eb661f..29acba1 100644 --- a/backend-compliance/compliance/api/evidence_routes.py +++ b/backend-compliance/compliance/api/evidence_routes.py @@ -248,7 +248,10 @@ async def create_evidence( ) db.commit() - return _build_evidence_response(evidence) + resp = _build_evidence_response(evidence) + if dsms_cid: + resp["dsms_cid"] = dsms_cid + return resp @router.delete("/evidence/{evidence_id}") @@ -313,6 +316,25 @@ async def upload_evidence( evidence.confidence_level = EvidenceConfidenceEnum.E1 evidence.truth_status = EvidenceTruthStatusEnum.UPLOADED + # Archive to DSMS (best-effort, non-blocking) + dsms_cid = None + try: + from compliance.services.dsms_client import archive_to_dsms + dsms_result = await archive_to_dsms( + content=content, filename=file.filename, + document_type="evidence", document_id=str(evidence.id), + version="1", tenant_id=control_id, + ) + dsms_cid = dsms_result.get("cid") + if dsms_cid: + evidence.confidence_level = EvidenceConfidenceEnum.E2 + from compliance.api.audit_trail_utils import log_audit_trail + log_audit_trail(db, "evidence", str(evidence.id), title, "archive", + "system", field_changed="dsms_cid", new_value=dsms_cid, + change_summary=f"Evidence archived to DSMS: {dsms_cid}") + except 
Exception: + pass # DSMS unavailable + # Four-Eyes: check if the linked control's domain requires it control_domain = control.domain.value if control.domain else "" if _requires_four_eyes(control_domain): @@ -321,7 +343,10 @@ async def upload_evidence( db.commit() - return _build_evidence_response(evidence) + resp = _build_evidence_response(evidence) + if dsms_cid: + resp["dsms_cid"] = dsms_cid + return resp # ============================================================================ @@ -813,7 +838,10 @@ async def review_evidence( db.commit() db.refresh(evidence) - return _build_evidence_response(evidence) + resp = _build_evidence_response(evidence) + if dsms_cid: + resp["dsms_cid"] = dsms_cid + return resp @router.patch("/evidence/{evidence_id}/reject", response_model=EvidenceResponse) @@ -840,7 +868,10 @@ async def reject_evidence( db.commit() db.refresh(evidence) - return _build_evidence_response(evidence) + resp = _build_evidence_response(evidence) + if dsms_cid: + resp["dsms_cid"] = dsms_cid + return resp # ============================================================================ diff --git a/dsms-gateway/models.py b/dsms-gateway/models.py index e5ec1eb..ecd94da 100644 --- a/dsms-gateway/models.py +++ b/dsms-gateway/models.py @@ -15,6 +15,8 @@ class DocumentMetadata(BaseModel): created_at: Optional[str] = None checksum: Optional[str] = None encrypted: bool = False + parent_cid: Optional[str] = None # CID of previous version (version chain) + tenant_id: Optional[str] = None class StoredDocument(BaseModel): diff --git a/dsms-gateway/routers/documents.py b/dsms-gateway/routers/documents.py index b45ffa5..a30f3ed 100644 --- a/dsms-gateway/routers/documents.py +++ b/dsms-gateway/routers/documents.py @@ -254,3 +254,34 @@ async def archive_legal_document( "archived_at": datetime.utcnow().isoformat(), "verification_url": f"{IPFS_GATEWAY_URL}/ipfs/{cid}" } + + +@router.get("/documents/{cid}/history") +async def get_document_history(cid: str): + """Follow the 
parent_cid chain to reconstruct version history."""
+    history, seen = [], set()
+    current_cid = cid
+
+    for _ in range(50):  # hard depth cap as a secondary safety net
+        if current_cid in seen: break  # guard against cyclic parent_cid links
+        seen.add(current_cid)
+        try:
+            raw = await ipfs_cat(current_cid)
+            package = json.loads(raw)
+            metadata = package.get("metadata", {})
+            history.append({
+                "cid": current_cid,
+                "version": metadata.get("version"),
+                "document_type": metadata.get("document_type"),
+                "document_id": metadata.get("document_id"),
+                "parent_cid": metadata.get("parent_cid"),
+                "created_at": metadata.get("created_at"),
+                "checksum": metadata.get("checksum"),
+            })
+            parent = metadata.get("parent_cid")
+            if not parent: break
+            current_cid = parent
+        except Exception:
+            break  # unreadable ancestor: return what we have (best-effort)
+
+    return {"cid": cid, "history": history, "depth": len(history)}