This repository has been archived on 2026-02-15. You can view files and clone it. You cannot open issues or pull requests or push a commit.
Files
BreakPilot Dev 19855efacc
Some checks failed
Tests / Go Tests (push) Has been cancelled
Tests / Python Tests (push) Has been cancelled
Tests / Integration Tests (push) Has been cancelled
Tests / Go Lint (push) Has been cancelled
Tests / Python Lint (push) Has been cancelled
Tests / Security Scan (push) Has been cancelled
Tests / All Checks Passed (push) Has been cancelled
Security Scanning / Secret Scanning (push) Has been cancelled
Security Scanning / Dependency Vulnerability Scan (push) Has been cancelled
Security Scanning / Go Security Scan (push) Has been cancelled
Security Scanning / Python Security Scan (push) Has been cancelled
Security Scanning / Node.js Security Scan (push) Has been cancelled
Security Scanning / Docker Image Security (push) Has been cancelled
Security Scanning / Security Summary (push) Has been cancelled
CI/CD Pipeline / Go Tests (push) Has been cancelled
CI/CD Pipeline / Python Tests (push) Has been cancelled
CI/CD Pipeline / Website Tests (push) Has been cancelled
CI/CD Pipeline / Linting (push) Has been cancelled
CI/CD Pipeline / Security Scan (push) Has been cancelled
CI/CD Pipeline / Docker Build & Push (push) Has been cancelled
CI/CD Pipeline / Integration Tests (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / CI Summary (push) Has been cancelled
ci/woodpecker/manual/build-ci-image Pipeline was successful
ci/woodpecker/manual/main Pipeline failed
feat: BreakPilot PWA - Full codebase (clean push without large binaries)
All services: admin-v2, studio-v2, website, ai-compliance-sdk,
consent-service, klausur-service, voice-service, and infrastructure.
Large PDFs and compiled binaries excluded via .gitignore.
2026-02-11 13:25:58 +01:00

468 lines
13 KiB
Python

"""
DSMS Gateway - REST API für dezentrales Speichersystem
Bietet eine vereinfachte API über IPFS für BreakPilot
"""
import os
import json
import httpx
import hashlib
from datetime import datetime
from typing import Optional
from fastapi import FastAPI, HTTPException, UploadFile, File, Header, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
import io
# Application instance; interactive OpenAPI docs are served at /docs.
app = FastAPI(
    title="DSMS Gateway",
    description="Dezentrales Daten Speicher System Gateway für BreakPilot",
    version="1.0.0"
)
# CORS configuration.
# NOTE(review): listing "*" alongside explicit origins while
# allow_credentials=True is contradictory — browsers reject wildcard
# origins on credentialed requests. Confirm the intended origin
# whitelist before production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:8000", "http://backend:8000", "*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Configuration (overridable via environment variables).
IPFS_API_URL = os.getenv("IPFS_API_URL", "http://dsms-node:5001")  # IPFS RPC API endpoint
IPFS_GATEWAY_URL = os.getenv("IPFS_GATEWAY_URL", "http://dsms-node:8080")  # public gateway base URL
# NOTE(review): insecure fallback secret — must be overridden in production.
# Appears unused in this module (token check below is format-only).
JWT_SECRET = os.getenv("JWT_SECRET", "your-super-secret-jwt-key-change-in-production")
# Models
class DocumentMetadata(BaseModel):
    """Metadata attached to every document stored through the gateway."""
    document_type: str  # 'legal_document', 'consent_record', 'audit_log'
    document_id: Optional[str] = None  # caller-supplied identifier, if any
    version: Optional[str] = None  # optional version label
    language: Optional[str] = "de"  # language code, defaults to German
    created_at: Optional[str] = None  # ISO-8601 timestamp, set by the gateway on store
    checksum: Optional[str] = None  # SHA-256 hex digest of the raw content
    encrypted: bool = False  # encryption flag; store_document always sets False (MVP)
class StoredDocument(BaseModel):
    """Response returned after a document was stored successfully."""
    cid: str  # Content Identifier (IPFS hash) of the stored package
    size: int  # size reported by the IPFS add call
    metadata: DocumentMetadata  # metadata embedded in the stored package
    gateway_url: str  # public gateway URL for direct retrieval
    timestamp: str  # ISO-8601 time the gateway processed the upload
class DocumentList(BaseModel):
    """Listing of stored documents (no pagination)."""
    documents: list  # dicts with keys: cid, metadata, filename
    total: int  # number of entries in `documents`
# Helper Functions
async def verify_token(authorization: Optional[str] = Header(None)) -> dict:
    """Validate the bearer token from the Authorization header (MVP stub).

    Only checks presence and the ``Bearer `` prefix — no cryptographic
    JWT validation yet (TODO for production).

    Raises:
        HTTPException: 401 when the header is missing or malformed.
    """
    bearer_prefix = "Bearer "
    # Missing or empty header -> reject immediately.
    if not authorization:
        raise HTTPException(status_code=401, detail="Authorization header fehlt")
    # Anything that is not a bearer token is rejected as malformed.
    if not authorization.startswith(bearer_prefix):
        raise HTTPException(status_code=401, detail="Ungültiges Token-Format")
    return {"valid": True}
async def ipfs_add(content: bytes, pin: bool = True) -> dict:
    """Upload *content* to the IPFS node and return the add-result JSON.

    Args:
        content: raw bytes to store.
        pin: whether the node should pin the object (default True).

    Raises:
        HTTPException: 502 when the IPFS API does not answer with 200.
    """
    upload = {"file": ("document", content)}
    query = {"pin": str(pin).lower()}
    async with httpx.AsyncClient(timeout=60.0) as client:
        resp = await client.post(
            f"{IPFS_API_URL}/api/v0/add",
            files=upload,
            params=query,
        )
    if resp.status_code != 200:
        raise HTTPException(
            status_code=502,
            detail=f"IPFS Fehler: {resp.text}",
        )
    return resp.json()
async def ipfs_cat(cid: str) -> bytes:
    """Fetch the raw bytes stored under *cid* from the IPFS node.

    Raises:
        HTTPException: 404 when the node cannot return the object.
    """
    async with httpx.AsyncClient(timeout=60.0) as client:
        resp = await client.post(
            f"{IPFS_API_URL}/api/v0/cat",
            params={"arg": cid},
        )
    if resp.status_code != 200:
        raise HTTPException(
            status_code=404,
            detail=f"Dokument nicht gefunden: {cid}",
        )
    return resp.content
async def ipfs_pin_ls() -> list:
    """Return the CIDs of all recursively pinned objects.

    Returns an empty list when the pin listing fails — callers treat
    that as "nothing stored" rather than an error.
    """
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(
            f"{IPFS_API_URL}/api/v0/pin/ls",
            params={"type": "recursive"},
        )
    if resp.status_code != 200:
        return []
    # The API returns {"Keys": {"<cid>": {...}, ...}} — we only need the CIDs.
    return list(resp.json().get("Keys", {}).keys())
# API Endpoints
@app.get("/health")
async def health_check():
    """Report gateway health and whether the IPFS daemon is reachable."""
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            id_resp = await client.post(f"{IPFS_API_URL}/api/v0/id")
        ipfs_ok = id_resp.status_code == 200
    except Exception:
        # Any transport failure counts as "IPFS unreachable", not an error.
        ipfs_ok = False
    return {
        "status": "healthy" if ipfs_ok else "degraded",
        "ipfs_connected": ipfs_ok,
        "timestamp": datetime.utcnow().isoformat()
    }
@app.post("/api/v1/documents", response_model=StoredDocument)
async def store_document(
    file: UploadFile = File(...),
    document_type: str = "legal_document",
    document_id: Optional[str] = None,
    version: Optional[str] = None,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Store an uploaded document in the DSMS.

    The raw bytes are wrapped together with their metadata into a JSON
    package, which is then pinned on IPFS.

    - **file**: the document to store
    - **document_type**: document kind (legal_document, consent_record, audit_log)
    - **document_id**: optional document identifier
    - **version**: optional version number
    - **language**: language code (default: de)
    """
    raw = await file.read()
    meta = DocumentMetadata(
        document_type=document_type,
        document_id=document_id,
        version=version,
        language=language,
        created_at=datetime.utcnow().isoformat(),
        # SHA-256 over the raw upload, verifiable later via /api/v1/verify.
        checksum=hashlib.sha256(raw).hexdigest(),
        encrypted=False,
    )
    # NOTE: despite the key name, the payload is hex-encoded — this matches
    # the bytes.fromhex() decoding used by the read endpoints.
    envelope = json.dumps({
        "metadata": meta.model_dump(),
        "content_base64": raw.hex(),
        "filename": file.filename,
    }).encode()
    add_result = await ipfs_add(envelope)
    cid = add_result.get("Hash")
    return StoredDocument(
        cid=cid,
        size=int(add_result.get("Size", 0)),
        metadata=meta,
        gateway_url=f"{IPFS_GATEWAY_URL}/ipfs/{cid}",
        timestamp=datetime.utcnow().isoformat(),
    )
@app.get("/api/v1/documents/{cid}")
async def get_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Retrieve a document from the DSMS.

    DSMS packages are unwrapped and streamed back with their original
    filename and metadata headers; objects that are not DSMS packages
    are streamed back unchanged.

    - **cid**: Content Identifier (IPFS hash)
    """
    content = await ipfs_cat(cid)
    try:
        package = json.loads(content)
    except json.JSONDecodeError:
        # Not a DSMS package — return the raw object as-is.
        return StreamingResponse(
            io.BytesIO(content),
            media_type="application/octet-stream"
        )
    metadata = package.get("metadata", {})
    # Payload is hex-encoded despite the "base64" key name (see store_document).
    original_content = bytes.fromhex(package.get("content_base64", ""))
    filename = package.get("filename", "document")
    return StreamingResponse(
        io.BytesIO(original_content),
        media_type="application/octet-stream",
        headers={
            # BUG FIX: the filename was a hard-coded literal and the stored
            # filename was never used; interpolate it into the header.
            "Content-Disposition": f'attachment; filename="{filename}"',
            "X-DSMS-Document-Type": metadata.get("document_type", "unknown"),
            "X-DSMS-Checksum": metadata.get("checksum", ""),
            "X-DSMS-Created-At": metadata.get("created_at", "")
        }
    )
@app.get("/api/v1/documents/{cid}/metadata")
async def get_document_metadata(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Return only the metadata of a stored document.

    - **cid**: Content Identifier (IPFS hash)
    """
    content = await ipfs_cat(cid)
    try:
        package = json.loads(content)
    except json.JSONDecodeError:
        # Raw (non-DSMS) object: no metadata available, report raw size only.
        return {
            "cid": cid,
            "metadata": {},
            "raw_size": len(content)
        }
    # Decode the hex payload solely to report its original size.
    payload_size = len(bytes.fromhex(package.get("content_base64", "")))
    return {
        "cid": cid,
        "metadata": package.get("metadata", {}),
        "filename": package.get("filename"),
        "size": payload_size
    }
@app.get("/api/v1/documents", response_model=DocumentList)
async def list_documents(
    _auth: dict = Depends(verify_token)
):
    """
    List all stored DSMS documents.

    Walks the node's recursive pin set and returns every pinned object
    that parses as a DSMS package; other pinned objects are skipped.
    """
    pinned = await ipfs_pin_ls()
    entries = []
    # Cap at 100 CIDs to keep response time bounded.
    for cid in pinned[:100]:
        try:
            package = json.loads(await ipfs_cat(cid))
            entries.append({
                "cid": cid,
                "metadata": package.get("metadata", {}),
                "filename": package.get("filename")
            })
        except Exception:
            # Unreadable or non-DSMS object — skip silently by design.
            continue
    return DocumentList(
        documents=entries,
        total=len(entries)
    )
@app.delete("/api/v1/documents/{cid}")
async def unpin_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Remove a document from the local pin set.

    The object remains in the network but becomes eligible for garbage
    collection on this node.

    - **cid**: Content Identifier (IPFS hash)
    """
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(
            f"{IPFS_API_URL}/api/v0/pin/rm",
            params={"arg": cid}
        )
    if resp.status_code != 200:
        raise HTTPException(
            status_code=404,
            detail=f"Konnte Pin nicht entfernen: {cid}"
        )
    return {
        "status": "unpinned",
        "cid": cid,
        "message": "Dokument wird bei nächster Garbage Collection entfernt"
    }
@app.post("/api/v1/legal-documents/archive")
async def archive_legal_document(
    document_id: str,
    version: str,
    content: str,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Permanently archive a legal document version (ToS, privacy policy, ...).

    - **document_id**: identifier of the legal document
    - **version**: version number
    - **content**: HTML/Markdown body
    - **language**: language code
    """
    checksum = hashlib.sha256(content.encode('utf-8')).hexdigest()
    metadata = {
        "document_type": "legal_document",
        "document_id": document_id,
        "version": version,
        "language": language,
        "created_at": datetime.utcnow().isoformat(),
        "checksum": checksum,
        "content_type": "text/html"
    }
    # Unlike binary uploads, legal content is stored as plain text inside
    # the JSON package (key "content", not hex-encoded).
    envelope = json.dumps(
        {
            "metadata": metadata,
            "content": content,
            "archived_at": datetime.utcnow().isoformat()
        },
        ensure_ascii=False
    ).encode('utf-8')
    result = await ipfs_add(envelope)
    cid = result.get("Hash")
    return {
        "cid": cid,
        "document_id": document_id,
        "version": version,
        "checksum": checksum,
        "archived_at": datetime.utcnow().isoformat(),
        "verification_url": f"{IPFS_GATEWAY_URL}/ipfs/{cid}"
    }
@app.get("/api/v1/verify/{cid}")
async def verify_document(cid: str):
    """
    Verify the integrity of a stored document.

    Publicly accessible for audit purposes (no auth dependency).
    Recomputes the SHA-256 digest of the stored payload and compares it
    with the checksum recorded in the package metadata.

    - **cid**: Content Identifier (IPFS hash)
    """
    try:
        package = json.loads(await ipfs_cat(cid))
        stored_checksum = package.get("metadata", {}).get("checksum")
        # Binary uploads carry a hex payload ("content_base64"); legal
        # archives carry plain text ("content").
        if "content_base64" in package:
            payload = bytes.fromhex(package["content_base64"])
            calculated_checksum = hashlib.sha256(payload).hexdigest()
        elif "content" in package:
            calculated_checksum = hashlib.sha256(
                package["content"].encode('utf-8')
            ).hexdigest()
        else:
            calculated_checksum = None
        # None means "could not be verified" (either checksum missing).
        if stored_checksum and calculated_checksum:
            integrity_valid = stored_checksum == calculated_checksum
        else:
            integrity_valid = None
        return {
            "cid": cid,
            "exists": True,
            "integrity_valid": integrity_valid,
            "metadata": package.get("metadata", {}),
            "stored_checksum": stored_checksum,
            "calculated_checksum": calculated_checksum,
            "verified_at": datetime.utcnow().isoformat()
        }
    except Exception as e:
        # Any failure (missing CID, bad JSON, bad hex) is reported as
        # "does not exist / not verifiable" rather than a 500.
        return {
            "cid": cid,
            "exists": False,
            "error": str(e),
            "verified_at": datetime.utcnow().isoformat()
        }
@app.get("/api/v1/node/info")
async def get_node_info():
    """Return identity and repository statistics of the DSMS (IPFS) node."""
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            id_resp = await client.post(f"{IPFS_API_URL}/api/v0/id")
            stat_resp = await client.post(f"{IPFS_API_URL}/api/v0/repo/stat")
        node = id_resp.json() if id_resp.status_code == 200 else {}
        repo = stat_resp.json() if stat_resp.status_code == 200 else {}
        return {
            "node_id": node.get("ID"),
            "protocol_version": node.get("ProtocolVersion"),
            "agent_version": node.get("AgentVersion"),
            "repo_size": repo.get("RepoSize"),
            "storage_max": repo.get("StorageMax"),
            "num_objects": repo.get("NumObjects"),
            # Only the first five multiaddresses, to keep the payload small.
            "addresses": node.get("Addresses", [])[:5]
        }
    except Exception as e:
        return {"error": str(e)}
# Standalone entry point: run a local uvicorn server on port 8082.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8082)