feat(dsms): Stufe 2+3 — Evidence/TechFile → DSMS + Version Chains + Audit Timeline
Build + Deploy / build-admin-compliance (push) Successful in 1m58s
Build + Deploy / build-backend-compliance (push) Successful in 12s
Build + Deploy / build-ai-sdk (push) Successful in 11s
Build + Deploy / build-developer-portal (push) Successful in 11s
Build + Deploy / build-tts (push) Successful in 21s
Build + Deploy / build-document-crawler (push) Successful in 11s
Build + Deploy / build-dsms-gateway (push) Successful in 14s
Build + Deploy / build-dsms-node (push) Successful in 14s
CI / branch-name (push) Has been skipped
CI / guardrail-integrity (push) Has been skipped
CI / loc-budget (push) Failing after 15s
CI / secret-scan (push) Has been skipped
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / nodejs-build (push) Successful in 2m40s
CI / dep-audit (push) Has been skipped
CI / sbom-scan (push) Has been skipped
CI / test-go (push) Successful in 40s
CI / test-python-backend (push) Successful in 37s
CI / test-python-document-crawler (push) Successful in 26s
CI / test-python-dsms-gateway (push) Successful in 22s
CI / validate-canonical-controls (push) Successful in 14s
Build + Deploy / trigger-orca (push) Successful in 2m26s

Stufe 2A: Evidence Upload → automatische DSMS-Archivierung
- Nach SHA-256 Hash → archive_to_dsms(), CID im Audit-Trail
- Evidence mit CID wird automatisch zu E2 (hash-verifiziert) hochgestuft

Stufe 2B: IACE Tech-File Export → DSMS
- PDF/Excel/DOCX/Markdown Exporte werden nach DSMS archiviert
- archiveTechFile() Helper fuer alle 4 Formate

Stufe 3A: DSMS Gateway — parent_cid + History Endpoint
- parent_cid + tenant_id Felder in DocumentMetadata
- GET /documents/{cid}/history — folgt parent_cid-Chain (max 50 deep)

Stufe 3C: Audit Timeline UI
- Neue Seite /sdk/audit-timeline
- Vertikale Timeline mit farbigen Action-Dots
- Filter: Alle, Nachweis, DSMS-Archiv, Control, Dokument, DSFA, VVT, TOM
- CID-Badges fuer DSMS-archivierte Eintraege

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-05-12 13:55:07 +02:00
parent 06bfbd1dca
commit edbf6d2be5
6 changed files with 241 additions and 4 deletions
@@ -0,0 +1,46 @@
'use client'
import { useState, useEffect } from 'react'
/**
 * One audit-trail entry as returned by the compliance backend
 * (`/api/sdk/v1/compliance/audit-trail`).
 */
export interface AuditEntry {
  id: string
  /** Entity category (used as the `entity_type` filter value, e.g. 'evidence', 'control'). */
  entity_type: string
  entity_id: string
  /** Human-readable name of the affected entity. */
  entity_name: string
  /** Action verb, e.g. 'create', 'update', 'archive'. */
  action: string
  /** Name of the changed field, or null when the action is not field-level. */
  field_changed: string | null
  old_value: string | null
  new_value: string | null
  /** Optional free-text summary of the change. */
  change_summary: string | null
  performed_by: string
  /** Timestamp string; consumers parse it via `new Date(...)` — presumably ISO 8601, confirm with backend. */
  performed_at: string
}
/**
 * Loads the compliance audit trail (up to 100 entries), re-fetching whenever
 * the entity-type filter changes.
 *
 * Returns the current entries, a loading flag, and the filter state.
 * Fetch failures are logged and leave the previous entries in place
 * (best-effort UI).
 */
export function useAuditTimeline() {
  const [entries, setEntries] = useState<AuditEntry[]>([])
  const [loading, setLoading] = useState(true)
  const [filter, setFilter] = useState<string>('all')

  useEffect(() => {
    // Guard against a stale response overwriting state after the filter
    // changed (or the component unmounted) mid-flight.
    let cancelled = false

    async function loadEntries() {
      setLoading(true)
      try {
        const params = new URLSearchParams({ limit: '100' })
        if (filter !== 'all') params.set('entity_type', filter)
        const res = await fetch(`/api/sdk/v1/compliance/audit-trail?${params}`)
        if (res.ok) {
          const json = await res.json()
          // Accept several envelope shapes, but never store a non-array:
          // a bare object would crash `.map` in the consuming component.
          const list = json?.entries ?? json?.audit_trail ?? json
          if (!cancelled) setEntries(Array.isArray(list) ? list : [])
        }
      } catch (err) {
        console.error('Failed to load audit trail:', err)
      } finally {
        if (!cancelled) setLoading(false)
      }
    }

    loadEntries()
    return () => {
      cancelled = true
    }
  }, [filter])

  return { entries, loading, filter, setFilter }
}
@@ -0,0 +1,117 @@
'use client'
import { useAuditTimeline, type AuditEntry } from './_hooks/useAuditTimeline'
// German display labels per audit entity_type; unknown types fall back to the raw key.
const ENTITY_LABELS: Record<string, string> = {
  evidence: 'Nachweis', control: 'Control', document: 'Dokument',
  dsfa: 'DSFA', vvt: 'VVT', tom: 'TOM', policy: 'Richtlinie',
  dsms_archive: 'DSMS-Archiv', risk: 'Risiko',
}
// Tailwind background class for the timeline dot and the action badge, keyed
// by audit action; unknown actions render gray (see TimelineEntry fallback).
const ACTION_COLORS: Record<string, string> = {
  create: 'bg-green-500', update: 'bg-blue-500', delete: 'bg-red-500',
  approve: 'bg-purple-500', archive: 'bg-emerald-500', review: 'bg-yellow-500',
  sign: 'bg-indigo-500', reject: 'bg-red-400',
}
// Filter chips rendered above the timeline: 'all' plus a subset of entity types.
const FILTER_OPTIONS = ['all', 'evidence', 'dsms_archive', 'control', 'document', 'dsfa', 'vvt', 'tom']
/**
 * Audit-timeline page: heading, entity-type filter chips, and a vertical
 * timeline of audit entries (spinner while loading, empty-state otherwise).
 */
export default function AuditTimelinePage() {
  const { entries, loading, filter, setFilter } = useAuditTimeline()

  // One chip per filter option; the active chip is highlighted purple.
  const filterChips = FILTER_OPTIONS.map((option) => {
    const isActive = filter === option
    return (
      <button
        key={option}
        onClick={() => setFilter(option)}
        className={`px-3 py-1.5 rounded-full text-xs font-medium transition-colors ${
          isActive
            ? 'bg-purple-600 text-white'
            : 'bg-gray-100 text-gray-600 hover:bg-gray-200 dark:bg-gray-700 dark:text-gray-300'
        }`}
      >
        {option === 'all' ? 'Alle' : ENTITY_LABELS[option] || option}
      </button>
    )
  })

  let body
  if (loading) {
    body = (
      <div className="flex items-center justify-center h-32">
        <div className="animate-spin rounded-full h-6 w-6 border-b-2 border-purple-600" />
      </div>
    )
  } else if (entries.length === 0) {
    body = (
      <div className="text-center py-16 text-gray-500">
        Keine Eintraege gefunden. Compliance-Aktionen werden automatisch protokolliert.
      </div>
    )
  } else {
    body = (
      <div className="relative">
        {/* Timeline line */}
        <div className="absolute left-6 top-0 bottom-0 w-0.5 bg-gray-200 dark:bg-gray-700" />
        <div className="space-y-4">
          {entries.map((entry) => (
            <TimelineEntry key={entry.id} entry={entry} />
          ))}
        </div>
      </div>
    )
  }

  return (
    <div className="max-w-4xl mx-auto space-y-6">
      <div>
        <h1 className="text-2xl font-bold text-gray-900 dark:text-white">Audit Timeline</h1>
        <p className="text-sm text-gray-500 mt-1">Chronologische Compliance-Historie mit DSMS-Nachweisen</p>
      </div>
      {/* Filter */}
      <div className="flex gap-2 flex-wrap">{filterChips}</div>
      {body}
    </div>
  )
}
/**
 * One timeline card: a colored action dot on the vertical line, the entity
 * name with type/action badges, an optional change summary, a truncated
 * CID badge for DSMS-archived entries, and date/time/actor on the right.
 */
function TimelineEntry({ entry }: { entry: AuditEntry }) {
  // Badge/dot color keyed by action; unknown actions fall back to gray.
  const dotColor = ACTION_COLORS[entry.action] || 'bg-gray-400'
  // Treat the entry as a DSMS archive record when the audit row carries a CID
  // (field_changed === 'dsms_cid') or the action itself is 'archive'.
  const isCID = entry.field_changed === 'dsms_cid' || entry.action === 'archive'
  const date = new Date(entry.performed_at)
  return (
    <div className="relative flex gap-4 pl-3">
      {/* Dot */}
      <div className={`relative z-10 w-3 h-3 rounded-full mt-1.5 flex-shrink-0 ring-4 ring-white dark:ring-gray-900 ${dotColor}`} />
      {/* Content */}
      <div className="flex-1 bg-white dark:bg-gray-800 rounded-xl border border-gray-200 dark:border-gray-700 p-4 min-w-0">
        <div className="flex items-start justify-between gap-3">
          <div className="min-w-0">
            <div className="flex items-center gap-2 flex-wrap">
              <span className="text-sm font-medium text-gray-900 dark:text-white">{entry.entity_name}</span>
              {/* Entity-type badge, localized via ENTITY_LABELS when known. */}
              <span className="px-2 py-0.5 rounded text-[10px] font-medium bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-300">
                {ENTITY_LABELS[entry.entity_type] || entry.entity_type}
              </span>
              {/* Raw action verb, tinted with the same color as the dot. */}
              <span className={`px-2 py-0.5 rounded text-[10px] font-medium text-white ${dotColor}`}>
                {entry.action}
              </span>
            </div>
            {entry.change_summary && (
              <p className="text-xs text-gray-600 dark:text-gray-400 mt-1">{entry.change_summary}</p>
            )}
            {/* CID badge: shield icon + truncated content identifier (first 8 / last 6 chars for long CIDs). */}
            {isCID && entry.new_value && (
              <div className="mt-2 flex items-center gap-2">
                <svg className="w-3.5 h-3.5 text-emerald-600 flex-shrink-0" fill="none" viewBox="0 0 24 24" stroke="currentColor">
                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m5.618-4.016A11.955 11.955 0 0112 2.944a11.955 11.955 0 01-8.618 3.04A12.02 12.02 0 003 9c0 5.591 3.824 10.29 9 11.622 5.176-1.332 9-6.03 9-11.622 0-1.042-.133-2.052-.382-3.016z" />
                </svg>
                <code className="text-[10px] bg-emerald-50 text-emerald-700 px-2 py-0.5 rounded font-mono dark:bg-emerald-900/30 dark:text-emerald-300">
                  {entry.new_value.length > 20 ? entry.new_value.slice(0, 8) + '...' + entry.new_value.slice(-6) : entry.new_value}
                </code>
                <span className="text-[10px] text-emerald-500">DSMS/IPFS</span>
              </div>
            )}
          </div>
          {/* Right column: locale-formatted date, HH:MM time, and actor. */}
          <div className="text-right flex-shrink-0">
            <div className="text-xs text-gray-400">{date.toLocaleDateString('de-DE')}</div>
            <div className="text-[10px] text-gray-300">{date.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit' })}</div>
            <div className="text-[10px] text-gray-300 mt-0.5">{entry.performed_by}</div>
          </div>
        </div>
      </div>
    </div>
  )
}
@@ -5,6 +5,7 @@ import (
"net/http"
"strings"
"github.com/breakpilot/ai-compliance-sdk/internal/dsms"
"github.com/breakpilot/ai-compliance-sdk/internal/iace"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/gin-gonic/gin"
@@ -411,6 +412,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) {
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("PDF export failed: %v", err)})
return
}
archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.pdf", safeName), projectID.String())
c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.pdf"`, safeName))
c.Data(http.StatusOK, "application/pdf", data)
@@ -420,6 +422,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) {
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Excel export failed: %v", err)})
return
}
archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.xlsx", safeName), projectID.String())
c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.xlsx"`, safeName))
c.Data(http.StatusOK, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", data)
@@ -429,6 +432,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) {
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("DOCX export failed: %v", err)})
return
}
archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.docx", safeName), projectID.String())
c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.docx"`, safeName))
c.Data(http.StatusOK, "application/vnd.openxmlformats-officedocument.wordprocessingml.document", data)
@@ -438,6 +442,7 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) {
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("Markdown export failed: %v", err)})
return
}
archiveTechFile(data, fmt.Sprintf("CE-Akte-%s.md", safeName), projectID.String())
c.Header("Content-Disposition", fmt.Sprintf(`attachment; filename="CE-Akte-%s.md"`, safeName))
c.Data(http.StatusOK, "text/markdown", data)
@@ -462,3 +467,8 @@ func (h *IACEHandler) ExportTechFile(c *gin.Context) {
})
}
}
// archiveTechFile stores a tech-file export to DSMS (best-effort).
//
// The archive runs in a goroutine so a slow or unavailable DSMS never delays
// the HTTP response that is written immediately after this call — the
// original synchronous call contradicted the documented "non-blocking"
// contract. The recover keeps a panic inside the DSMS client from killing
// the whole process (goroutine panics are not caught by gin's middleware).
// data is only read concurrently; callers must not mutate it afterwards.
func archiveTechFile(data []byte, filename, projectID string) {
	go func() {
		defer func() {
			_ = recover() // best-effort: swallow DSMS client panics
		}()
		dsms.Archive(data, filename, "ce_techfile", projectID, "1")
	}()
}
@@ -248,7 +248,10 @@ async def create_evidence(
)
db.commit()
return _build_evidence_response(evidence)
resp = _build_evidence_response(evidence)
if dsms_cid:
resp["dsms_cid"] = dsms_cid
return resp
@router.delete("/evidence/{evidence_id}")
@@ -313,6 +316,25 @@ async def upload_evidence(
evidence.confidence_level = EvidenceConfidenceEnum.E1
evidence.truth_status = EvidenceTruthStatusEnum.UPLOADED
# Archive to DSMS (best-effort, non-blocking)
dsms_cid = None
try:
from compliance.services.dsms_client import archive_to_dsms
dsms_result = await archive_to_dsms(
content=content, filename=file.filename,
document_type="evidence", document_id=str(evidence.id),
version="1", tenant_id=control_id,
)
dsms_cid = dsms_result.get("cid")
if dsms_cid:
evidence.confidence_level = EvidenceConfidenceEnum.E2
from compliance.api.audit_trail_utils import log_audit_trail
log_audit_trail(db, "evidence", str(evidence.id), title, "archive",
"system", field_changed="dsms_cid", new_value=dsms_cid,
change_summary=f"Evidence archived to DSMS: {dsms_cid}")
except Exception:
pass # DSMS unavailable
# Four-Eyes: check if the linked control's domain requires it
control_domain = control.domain.value if control.domain else ""
if _requires_four_eyes(control_domain):
@@ -321,7 +343,10 @@ async def upload_evidence(
db.commit()
return _build_evidence_response(evidence)
resp = _build_evidence_response(evidence)
if dsms_cid:
resp["dsms_cid"] = dsms_cid
return resp
# ============================================================================
@@ -813,7 +838,10 @@ async def review_evidence(
db.commit()
db.refresh(evidence)
return _build_evidence_response(evidence)
resp = _build_evidence_response(evidence)
if dsms_cid:
resp["dsms_cid"] = dsms_cid
return resp
@router.patch("/evidence/{evidence_id}/reject", response_model=EvidenceResponse)
@@ -840,7 +868,10 @@ async def reject_evidence(
db.commit()
db.refresh(evidence)
return _build_evidence_response(evidence)
resp = _build_evidence_response(evidence)
if dsms_cid:
resp["dsms_cid"] = dsms_cid
return resp
# ============================================================================
+2
View File
@@ -15,6 +15,8 @@ class DocumentMetadata(BaseModel):
created_at: Optional[str] = None
checksum: Optional[str] = None
encrypted: bool = False
parent_cid: Optional[str] = None # CID of previous version (version chain)
tenant_id: Optional[str] = None
class StoredDocument(BaseModel):
+31
View File
@@ -254,3 +254,34 @@ async def archive_legal_document(
"archived_at": datetime.utcnow().isoformat(),
"verification_url": f"{IPFS_GATEWAY_URL}/ipfs/{cid}"
}
@router.get("/documents/{cid}/history")
async def get_document_history(cid: str):
    """Reconstruct a document's version history by walking the parent_cid chain.

    Starting from ``cid``, each IPFS package is fetched and its
    ``metadata.parent_cid`` pointer followed to the previous version.
    Traversal is best-effort: an unreachable or malformed package simply
    ends the chain rather than raising.

    Returns a dict with the requested ``cid``, the ordered ``history``
    (newest first), and the chain ``depth``.
    """
    history = []
    seen = set()  # detects parent_cid cycles, which max_depth alone would pad with duplicates
    current_cid = cid
    max_depth = 50  # hard cap on IPFS round-trips / chain length
    for _ in range(max_depth):
        if current_cid in seen:
            break  # cycle in the chain — stop instead of emitting duplicate entries
        seen.add(current_cid)
        try:
            raw = await ipfs_cat(current_cid)
            metadata = json.loads(raw).get("metadata", {})
        except Exception:
            break  # best-effort: unreachable/corrupt package ends the chain
        parent = metadata.get("parent_cid")
        history.append({
            "cid": current_cid,
            "version": metadata.get("version"),
            "document_type": metadata.get("document_type"),
            "document_id": metadata.get("document_id"),
            "parent_cid": parent,
            "created_at": metadata.get("created_at"),
            "checksum": metadata.get("checksum"),
        })
        if not parent:
            break
        current_cid = parent
    return {"cid": cid, "history": history, "depth": len(history)}