fix: Restore all files lost during destructive rebase

A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-02-09 09:51:32 +01:00
parent f7487ee240
commit 21a844cb8a
1986 changed files with 744143 additions and 1731 deletions

32
dsms-gateway/Dockerfile Normal file
View File

@@ -0,0 +1,32 @@
# DSMS Gateway - REST API for the decentralized storage system (IPFS wrapper)
FROM python:3.11-slim
LABEL maintainer="BreakPilot <dev@breakpilot.app>"
LABEL description="DSMS Gateway - REST API wrapper for IPFS"
WORKDIR /app
# Install curl for healthcheck and dependencies
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application
COPY main.py .
# Environment variables (point at the sibling IPFS container by default)
ENV IPFS_API_URL=http://dsms-node:5001
ENV IPFS_GATEWAY_URL=http://dsms-node:8080
ENV PORT=8082
# Expose port
EXPOSE 8082
# Health check against the gateway's own /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
  CMD curl -f http://localhost:8082/health || exit 1
# Run application (NOTE: port is hard-coded here; ENV PORT is not consumed by this CMD)
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8082"]

467
dsms-gateway/main.py Normal file
View File

@@ -0,0 +1,467 @@
"""
DSMS Gateway - REST API für dezentrales Speichersystem
Bietet eine vereinfachte API über IPFS für BreakPilot
"""
import os
import json
import httpx
import hashlib
from datetime import datetime
from typing import Optional
from fastapi import FastAPI, HTTPException, UploadFile, File, Header, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
import io
# FastAPI application instance for the DSMS gateway.
app = FastAPI(
    title="DSMS Gateway",
    description="Dezentrales Daten Speicher System Gateway für BreakPilot",
    version="1.0.0"
)

# CORS configuration.
# NOTE(review): "*" together with allow_credentials=True is rejected by
# browsers for credentialed requests — confirm whether the wildcard entry
# is intentional or should be replaced by the explicit origin list only.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:8000", "http://backend:8000", "*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Configuration (overridable via environment variables).
IPFS_API_URL = os.getenv("IPFS_API_URL", "http://dsms-node:5001")  # Kubo RPC API
IPFS_GATEWAY_URL = os.getenv("IPFS_GATEWAY_URL", "http://dsms-node:8080")  # public HTTP gateway
# NOTE(review): insecure default secret — must be overridden in production.
# Currently unused by verify_token (MVP does not validate JWTs yet).
JWT_SECRET = os.getenv("JWT_SECRET", "your-super-secret-jwt-key-change-in-production")
# Models
class DocumentMetadata(BaseModel):
    """Metadata attached to every stored document."""
    document_type: str  # 'legal_document', 'consent_record', 'audit_log'
    document_id: Optional[str] = None  # caller-supplied identifier
    version: Optional[str] = None  # caller-supplied version label
    language: Optional[str] = "de"  # language code, defaults to German
    created_at: Optional[str] = None  # ISO-8601 creation timestamp
    checksum: Optional[str] = None  # SHA-256 hex digest of the raw content
    encrypted: bool = False  # payload is stored unencrypted by default
class StoredDocument(BaseModel):
    """Response returned after a document was stored successfully."""
    cid: str  # Content Identifier (IPFS hash)
    size: int  # package size in bytes as reported by IPFS
    metadata: DocumentMetadata
    gateway_url: str  # public URL of the stored package on the IPFS gateway
    timestamp: str  # ISO-8601 time the gateway processed the upload
class DocumentList(BaseModel):
    """Listing of stored (pinned) DSMS documents."""
    documents: list  # entries of shape {cid, metadata, filename}
    total: int  # number of entries in `documents`
# Helper Functions
async def verify_token(authorization: Optional[str] = Header(None)) -> dict:
    """Verify the bearer token (simplified for the MVP).

    Raises HTTP 401 when the Authorization header is missing, is not in
    "Bearer <token>" format, or carries an empty token. In production the
    token itself must be validated as a JWT against JWT_SECRET.
    """
    if not authorization:
        raise HTTPException(status_code=401, detail="Authorization header fehlt")
    # In production: validate the JWT. For the MVP only the format is checked.
    if not authorization.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Ungültiges Token-Format")
    # Fix: previously a header of exactly "Bearer " (empty token) was accepted.
    if not authorization[len("Bearer "):].strip():
        raise HTTPException(status_code=401, detail="Ungültiges Token-Format")
    return {"valid": True}
async def ipfs_add(content: bytes, pin: bool = True) -> dict:
    """Upload raw bytes to IPFS via the Kubo RPC API.

    Returns the parsed JSON reply ({"Hash": ..., "Size": ...}); raises
    HTTP 502 when the IPFS node rejects the request.
    """
    upload = {"file": ("document", content)}
    query = {"pin": str(pin).lower()}
    async with httpx.AsyncClient(timeout=60.0) as http:
        reply = await http.post(
            f"{IPFS_API_URL}/api/v0/add",
            files=upload,
            params=query,
        )
    if reply.status_code != 200:
        raise HTTPException(
            status_code=502,
            detail=f"IPFS Fehler: {reply.text}"
        )
    return reply.json()
async def ipfs_cat(cid: str) -> bytes:
    """Fetch the raw bytes stored under *cid* from IPFS.

    Raises HTTP 404 when the node does not return the object.
    """
    async with httpx.AsyncClient(timeout=60.0) as http:
        reply = await http.post(
            f"{IPFS_API_URL}/api/v0/cat",
            params={"arg": cid},
        )
    if reply.status_code == 200:
        return reply.content
    raise HTTPException(
        status_code=404,
        detail=f"Dokument nicht gefunden: {cid}"
    )
async def ipfs_pin_ls() -> list:
    """Return the CIDs of all recursively pinned objects.

    Best-effort: returns an empty list when the node replies with an error.
    """
    async with httpx.AsyncClient(timeout=30.0) as http:
        reply = await http.post(
            f"{IPFS_API_URL}/api/v0/pin/ls",
            params={"type": "recursive"},
        )
    if reply.status_code != 200:
        return []
    return list(reply.json().get("Keys", {}).keys())
# API Endpoints
@app.get("/health")
async def health_check():
    """Health check: reports whether the IPFS node is reachable."""
    ipfs_ok = False
    try:
        async with httpx.AsyncClient(timeout=5.0) as http:
            reply = await http.post(f"{IPFS_API_URL}/api/v0/id")
            ipfs_ok = reply.status_code == 200
    except Exception:
        # Any transport error counts as "IPFS not connected".
        ipfs_ok = False
    return {
        "status": "healthy" if ipfs_ok else "degraded",
        "ipfs_connected": ipfs_ok,
        "timestamp": datetime.utcnow().isoformat()
    }
@app.post("/api/v1/documents", response_model=StoredDocument)
async def store_document(
    file: UploadFile = File(...),
    document_type: str = "legal_document",
    document_id: Optional[str] = None,
    version: Optional[str] = None,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Store a document in the DSMS.

    - **file**: the document to store
    - **document_type**: kind of document (legal_document, consent_record, audit_log)
    - **document_id**: optional document id
    - **version**: optional version label
    - **language**: language code (default: de)
    """
    content = await file.read()
    # SHA-256 over the raw upload; stored in the metadata for later verification.
    checksum = hashlib.sha256(content).hexdigest()
    # Build the metadata record.
    metadata = DocumentMetadata(
        document_type=document_type,
        document_id=document_id,
        version=version,
        language=language,
        created_at=datetime.utcnow().isoformat(),
        checksum=checksum,
        encrypted=False
    )
    # Wrap the document together with its metadata into one JSON package.
    # NOTE(review): despite the key name, the payload is HEX-encoded, not
    # base64 — all readers (get_document, verify_document) decode via
    # bytes.fromhex, so the key must not be renamed without a migration.
    package = {
        "metadata": metadata.model_dump(),
        "content_base64": content.hex(),  # hex-encoded so it fits in JSON
        "filename": file.filename
    }
    package_bytes = json.dumps(package).encode()
    # Add to IPFS (pinned by default).
    result = await ipfs_add(package_bytes)
    cid = result.get("Hash")
    size = int(result.get("Size", 0))
    return StoredDocument(
        cid=cid,
        size=size,
        metadata=metadata,
        gateway_url=f"{IPFS_GATEWAY_URL}/ipfs/{cid}",
        timestamp=datetime.utcnow().isoformat()
    )
@app.get("/api/v1/documents/{cid}")
async def get_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Retrieve a document from the DSMS.

    - **cid**: Content Identifier (IPFS hash)

    Streams the original file content as an attachment; content that is
    not a DSMS package is streamed back unchanged.
    """
    content = await ipfs_cat(cid)
    try:
        package = json.loads(content)
        metadata = package.get("metadata", {})
        # Payload is hex-encoded (despite the key name) — decode it.
        original_content = bytes.fromhex(package.get("content_base64", ""))
        filename = package.get("filename", "document")
        return StreamingResponse(
            io.BytesIO(original_content),
            media_type="application/octet-stream",
            headers={
                # Fix: the stored filename was extracted but never used;
                # the header previously contained a literal placeholder.
                "Content-Disposition": f'attachment; filename="{filename}"',
                "X-DSMS-Document-Type": metadata.get("document_type", "unknown"),
                "X-DSMS-Checksum": metadata.get("checksum", ""),
                "X-DSMS-Created-At": metadata.get("created_at", "")
            }
        )
    except json.JSONDecodeError:
        # Not a DSMS package — return the raw bytes as-is.
        return StreamingResponse(
            io.BytesIO(content),
            media_type="application/octet-stream"
        )
@app.get("/api/v1/documents/{cid}/metadata")
async def get_document_metadata(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Return only the metadata of a stored document.

    - **cid**: Content Identifier (IPFS hash)
    """
    raw = await ipfs_cat(cid)
    try:
        package = json.loads(raw)
    except json.JSONDecodeError:
        # Not a DSMS package: expose only the raw object size.
        return {
            "cid": cid,
            "metadata": {},
            "raw_size": len(raw)
        }
    hex_payload = package.get("content_base64", "")
    return {
        "cid": cid,
        "metadata": package.get("metadata", {}),
        "filename": package.get("filename"),
        "size": len(bytes.fromhex(hex_payload))
    }
@app.get("/api/v1/documents", response_model=DocumentList)
async def list_documents(
    _auth: dict = Depends(verify_token)
):
    """
    List all stored DSMS documents.

    Pinned objects that are not DSMS packages are silently skipped.
    """
    pinned = await ipfs_pin_ls()
    entries = []
    # Cap at 100 pins to keep the endpoint responsive.
    for pin_cid in pinned[:100]:
        try:
            package = json.loads(await ipfs_cat(pin_cid))
            entries.append({
                "cid": pin_cid,
                "metadata": package.get("metadata", {}),
                "filename": package.get("filename")
            })
        except Exception:
            # Skip non-DSMS objects.
            continue
    return DocumentList(documents=entries, total=len(entries))
@app.delete("/api/v1/documents/{cid}")
async def unpin_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Remove a document from the local pin set.

    The document remains in the network but becomes eligible for garbage
    collection on this node.

    - **cid**: Content Identifier (IPFS hash)
    """
    async with httpx.AsyncClient(timeout=30.0) as http:
        reply = await http.post(
            f"{IPFS_API_URL}/api/v0/pin/rm",
            params={"arg": cid},
        )
    if reply.status_code != 200:
        raise HTTPException(
            status_code=404,
            detail=f"Konnte Pin nicht entfernen: {cid}"
        )
    return {
        "status": "unpinned",
        "cid": cid,
        "message": "Dokument wird bei nächster Garbage Collection entfernt"
    }
@app.post("/api/v1/legal-documents/archive")
async def archive_legal_document(
    document_id: str,
    version: str,
    content: str,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Permanently archive a legal document version (terms, privacy policy, ...).

    - **document_id**: id of the legal document
    - **version**: version label
    - **content**: HTML/Markdown body
    - **language**: language code
    """
    # Checksum over the UTF-8 encoded body.
    encoded = content.encode('utf-8')
    digest = hashlib.sha256(encoded).hexdigest()
    # Metadata for the archived version.
    metadata = {
        "document_type": "legal_document",
        "document_id": document_id,
        "version": version,
        "language": language,
        "created_at": datetime.utcnow().isoformat(),
        "checksum": digest,
        "content_type": "text/html"
    }
    # Assemble the archive package and push it to IPFS.
    package = {
        "metadata": metadata,
        "content": content,
        "archived_at": datetime.utcnow().isoformat()
    }
    payload = json.dumps(package, ensure_ascii=False).encode('utf-8')
    result = await ipfs_add(payload)
    cid = result.get("Hash")
    return {
        "cid": cid,
        "document_id": document_id,
        "version": version,
        "checksum": digest,
        "archived_at": datetime.utcnow().isoformat(),
        "verification_url": f"{IPFS_GATEWAY_URL}/ipfs/{cid}"
    }
@app.get("/api/v1/verify/{cid}")
async def verify_document(cid: str):
    """
    Verify the integrity of a stored document.

    Publicly accessible for audit purposes (no auth dependency).

    - **cid**: Content Identifier (IPFS hash)
    """
    try:
        package = json.loads(await ipfs_cat(cid))
        stored_checksum = package.get("metadata", {}).get("checksum")
        # Recompute the checksum from whichever payload variant is present:
        # hex-encoded binary ("content_base64") or plain text ("content").
        if "content_base64" in package:
            payload = bytes.fromhex(package["content_base64"])
            calculated_checksum = hashlib.sha256(payload).hexdigest()
        elif "content" in package:
            payload = package["content"].encode('utf-8')
            calculated_checksum = hashlib.sha256(payload).hexdigest()
        else:
            calculated_checksum = None
        # None when either side is missing — verdict is then "unknown".
        if stored_checksum and calculated_checksum:
            integrity_valid = stored_checksum == calculated_checksum
        else:
            integrity_valid = None
        return {
            "cid": cid,
            "exists": True,
            "integrity_valid": integrity_valid,
            "metadata": package.get("metadata", {}),
            "stored_checksum": stored_checksum,
            "calculated_checksum": calculated_checksum,
            "verified_at": datetime.utcnow().isoformat()
        }
    except Exception as e:
        return {
            "cid": cid,
            "exists": False,
            "error": str(e),
            "verified_at": datetime.utcnow().isoformat()
        }
@app.get("/api/v1/node/info")
async def get_node_info():
    """
    Return information about the local DSMS (IPFS) node.

    Best-effort: any failure is reported as {"error": ...} with HTTP 200.
    """
    try:
        async with httpx.AsyncClient(timeout=10.0) as http:
            id_reply = await http.post(f"{IPFS_API_URL}/api/v0/id")
            stat_reply = await http.post(f"{IPFS_API_URL}/api/v0/repo/stat")
        node = id_reply.json() if id_reply.status_code == 200 else {}
        repo = stat_reply.json() if stat_reply.status_code == 200 else {}
        return {
            "node_id": node.get("ID"),
            "protocol_version": node.get("ProtocolVersion"),
            "agent_version": node.get("AgentVersion"),
            "repo_size": repo.get("RepoSize"),
            "storage_max": repo.get("StorageMax"),
            "num_objects": repo.get("NumObjects"),
            "addresses": node.get("Addresses", [])[:5]  # first 5 only
        }
    except Exception as e:
        return {"error": str(e)}
if __name__ == "__main__":
    # Local development entry point; in Docker uvicorn is launched via CMD.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8082)

View File

@@ -0,0 +1,9 @@
# Web framework and ASGI server
fastapi>=0.104.0
uvicorn>=0.24.0
# Async HTTP client used to talk to the IPFS RPC API
httpx>=0.25.0
pydantic>=2.5.0
# Multipart parsing for file uploads
python-multipart>=0.0.6
# Testing
pytest>=7.4.0
pytest-asyncio>=0.21.0

612
dsms-gateway/test_main.py Normal file
View File

@@ -0,0 +1,612 @@
"""
Unit Tests für DSMS Gateway
Tests für alle API-Endpoints und Hilfsfunktionen
"""
import pytest
import hashlib
import json
from unittest.mock import AsyncMock, patch, MagicMock
from fastapi.testclient import TestClient
from httpx import Response
# Import der App
from main import app, DocumentMetadata, StoredDocument, DocumentList
# Test client bound to the FastAPI app (requests run in-process, no server).
client = TestClient(app)
# ==================== Fixtures ====================
@pytest.fixture
def valid_auth_header():
    """A syntactically valid Authorization header for tests."""
    return {"Authorization": "Bearer test-token-12345"}
@pytest.fixture
def sample_document_metadata():
    """Sample metadata object used by the model tests."""
    return DocumentMetadata(
        document_type="legal_document",
        document_id="doc-123",
        version="1.0",
        language="de",
        created_at="2024-01-01T00:00:00",
        checksum="abc123",
        encrypted=False
    )
@pytest.fixture
def mock_ipfs_response():
    """Canned reply of the IPFS `add` endpoint."""
    return {
        "Hash": "QmTest1234567890abcdef",
        "Size": "1024"
    }
# ==================== Health Check Tests ====================
class TestHealthCheck:
    """Tests for the health check endpoint."""

    def test_health_check_ipfs_connected(self):
        """Health check returns all expected fields when IPFS is reachable."""
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()
            mock_instance.post.return_value = MagicMock(status_code=200)
            mock_client.return_value.__aenter__.return_value = mock_instance
            response = client.get("/health")
            assert response.status_code == 200
            data = response.json()
            assert "status" in data
            assert "ipfs_connected" in data
            assert "timestamp" in data

    def test_health_check_ipfs_disconnected(self):
        """Health check degrades gracefully when IPFS is unreachable."""
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()
            mock_instance.post.side_effect = Exception("Connection failed")
            mock_client.return_value.__aenter__.return_value = mock_instance
            response = client.get("/health")
            assert response.status_code == 200
            data = response.json()
            assert data["status"] == "degraded"
            assert data["ipfs_connected"] is False
# ==================== Authorization Tests ====================
class TestAuthorization:
    """Tests for the authorization dependency."""

    def test_documents_endpoint_without_auth_returns_401(self):
        """Document endpoint without auth returns 401."""
        response = client.get("/api/v1/documents")
        assert response.status_code == 401

    def test_documents_endpoint_with_invalid_token_returns_401(self):
        """An invalid token format returns 401."""
        response = client.get(
            "/api/v1/documents",
            headers={"Authorization": "InvalidFormat"}
        )
        assert response.status_code == 401

    def test_documents_endpoint_with_valid_token_format(self, valid_auth_header):
        """A well-formed bearer token is accepted."""
        with patch("main.ipfs_pin_ls", new_callable=AsyncMock) as mock_pin_ls:
            mock_pin_ls.return_value = []
            response = client.get(
                "/api/v1/documents",
                headers=valid_auth_header
            )
            assert response.status_code == 200
# ==================== Document Storage Tests ====================
class TestDocumentStorage:
    """Tests for storing documents."""

    def test_store_document_success(self, valid_auth_header, mock_ipfs_response):
        """A document is stored successfully and the CID is returned."""
        with patch("main.ipfs_add", new_callable=AsyncMock) as mock_add:
            mock_add.return_value = mock_ipfs_response
            test_content = b"Test document content"
            response = client.post(
                "/api/v1/documents",
                headers=valid_auth_header,
                files={"file": ("test.txt", test_content, "text/plain")},
                data={
                    "document_type": "legal_document",
                    "document_id": "doc-123",
                    "version": "1.0",
                    "language": "de"
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert "cid" in data
            assert data["cid"] == "QmTest1234567890abcdef"
            assert "metadata" in data
            assert "gateway_url" in data

    def test_store_document_calculates_checksum(self, valid_auth_header, mock_ipfs_response):
        """The SHA-256 checksum of the upload is computed correctly."""
        with patch("main.ipfs_add", new_callable=AsyncMock) as mock_add:
            mock_add.return_value = mock_ipfs_response
            test_content = b"Test content for checksum"
            expected_checksum = hashlib.sha256(test_content).hexdigest()
            response = client.post(
                "/api/v1/documents",
                headers=valid_auth_header,
                files={"file": ("test.txt", test_content, "text/plain")}
            )
            assert response.status_code == 200
            data = response.json()
            assert data["metadata"]["checksum"] == expected_checksum

    def test_store_document_without_file_returns_422(self, valid_auth_header):
        """A missing file yields a 422 validation error."""
        response = client.post(
            "/api/v1/documents",
            headers=valid_auth_header
        )
        assert response.status_code == 422
# ==================== Document Retrieval Tests ====================
class TestDocumentRetrieval:
    """Tests for retrieving documents."""

    def test_get_document_success(self, valid_auth_header):
        """A stored document is retrieved with its original content."""
        test_content = b"Original content"
        package = {
            "metadata": {
                "document_type": "legal_document",
                "checksum": hashlib.sha256(test_content).hexdigest()
            },
            "content_base64": test_content.hex(),
            "filename": "test.txt"
        }
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.return_value = json.dumps(package).encode()
            response = client.get(
                "/api/v1/documents/QmTestCid123",
                headers=valid_auth_header
            )
            assert response.status_code == 200
            assert response.content == test_content

    def test_get_document_not_found(self, valid_auth_header):
        """A non-existent document yields 404."""
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            from fastapi import HTTPException
            mock_cat.side_effect = HTTPException(status_code=404, detail="Not found")
            response = client.get(
                "/api/v1/documents/QmNonExistent",
                headers=valid_auth_header
            )
            assert response.status_code == 404

    def test_get_document_metadata_success(self, valid_auth_header):
        """Document metadata is retrieved without the payload."""
        test_content = b"Content"
        package = {
            "metadata": {
                "document_type": "legal_document",
                "document_id": "doc-123",
                "version": "1.0"
            },
            "content_base64": test_content.hex(),
            "filename": "test.txt"
        }
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.return_value = json.dumps(package).encode()
            response = client.get(
                "/api/v1/documents/QmTestCid123/metadata",
                headers=valid_auth_header
            )
            assert response.status_code == 200
            data = response.json()
            assert data["cid"] == "QmTestCid123"
            assert data["metadata"]["document_type"] == "legal_document"
# ==================== Document List Tests ====================
class TestDocumentList:
    """Tests for listing stored documents."""

    def test_list_documents_empty(self, valid_auth_header):
        """An empty pin set yields an empty document list."""
        with patch("main.ipfs_pin_ls", new_callable=AsyncMock) as mock_pin_ls:
            mock_pin_ls.return_value = []
            response = client.get(
                "/api/v1/documents",
                headers=valid_auth_header
            )
            assert response.status_code == 200
            data = response.json()
            assert data["documents"] == []
            assert data["total"] == 0

    def test_list_documents_with_items(self, valid_auth_header):
        """Pinned DSMS packages show up as list entries."""
        package = {
            "metadata": {"document_type": "legal_document"},
            "content_base64": "68656c6c6f",
            "filename": "test.txt"
        }
        with patch("main.ipfs_pin_ls", new_callable=AsyncMock) as mock_pin_ls:
            mock_pin_ls.return_value = ["QmCid1", "QmCid2"]
            with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
                mock_cat.return_value = json.dumps(package).encode()
                response = client.get(
                    "/api/v1/documents",
                    headers=valid_auth_header
                )
                assert response.status_code == 200
                data = response.json()
                assert data["total"] == 2
# ==================== Document Deletion Tests ====================
class TestDocumentDeletion:
    """Tests for unpinning (deleting) documents."""

    def test_unpin_document_success(self, valid_auth_header):
        """A document is unpinned successfully."""
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()
            mock_instance.post.return_value = MagicMock(status_code=200)
            mock_client.return_value.__aenter__.return_value = mock_instance
            response = client.delete(
                "/api/v1/documents/QmTestCid123",
                headers=valid_auth_header
            )
            assert response.status_code == 200
            data = response.json()
            assert data["status"] == "unpinned"
            assert data["cid"] == "QmTestCid123"

    def test_unpin_document_not_found(self, valid_auth_header):
        """Unpinning a non-existent document yields 404."""
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()
            mock_instance.post.return_value = MagicMock(status_code=404)
            mock_client.return_value.__aenter__.return_value = mock_instance
            response = client.delete(
                "/api/v1/documents/QmNonExistent",
                headers=valid_auth_header
            )
            assert response.status_code == 404
# ==================== Legal Document Archive Tests ====================
class TestLegalDocumentArchive:
    """Tests for archiving legal documents."""

    def test_archive_legal_document_success(self, valid_auth_header, mock_ipfs_response):
        """A legal document version is archived successfully."""
        with patch("main.ipfs_add", new_callable=AsyncMock) as mock_add:
            mock_add.return_value = mock_ipfs_response
            response = client.post(
                "/api/v1/legal-documents/archive",
                headers=valid_auth_header,
                params={
                    "document_id": "privacy-policy",
                    "version": "2.0",
                    "content": "<h1>Datenschutzerklärung</h1>",
                    "language": "de"
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert "cid" in data
            assert data["document_id"] == "privacy-policy"
            assert data["version"] == "2.0"
            assert "checksum" in data
            assert "archived_at" in data

    def test_archive_legal_document_calculates_checksum(self, valid_auth_header, mock_ipfs_response):
        """The checksum over the HTML content is computed correctly."""
        content = "<h1>Test Content</h1>"
        expected_checksum = hashlib.sha256(content.encode('utf-8')).hexdigest()
        with patch("main.ipfs_add", new_callable=AsyncMock) as mock_add:
            mock_add.return_value = mock_ipfs_response
            response = client.post(
                "/api/v1/legal-documents/archive",
                headers=valid_auth_header,
                params={
                    "document_id": "terms",
                    "version": "1.0",
                    "content": content
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert data["checksum"] == expected_checksum
# ==================== Document Verification Tests ====================
class TestDocumentVerification:
    """Tests for document integrity verification."""

    def test_verify_document_integrity_valid(self):
        """A document with a matching checksum verifies as valid."""
        content = "Test content"
        checksum = hashlib.sha256(content.encode('utf-8')).hexdigest()
        package = {
            "metadata": {
                "document_type": "legal_document",
                "checksum": checksum
            },
            "content": content
        }
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.return_value = json.dumps(package).encode()
            response = client.get("/api/v1/verify/QmTestCid123")
            assert response.status_code == 200
            data = response.json()
            assert data["exists"] is True
            assert data["integrity_valid"] is True
            assert data["stored_checksum"] == checksum
            assert data["calculated_checksum"] == checksum

    def test_verify_document_integrity_invalid(self):
        """A document with a mismatching checksum (tampered) verifies as invalid."""
        package = {
            "metadata": {
                "document_type": "legal_document",
                "checksum": "fake_checksum_12345"
            },
            "content": "Actual content"
        }
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.return_value = json.dumps(package).encode()
            response = client.get("/api/v1/verify/QmTestCid123")
            assert response.status_code == 200
            data = response.json()
            assert data["exists"] is True
            assert data["integrity_valid"] is False

    def test_verify_document_not_found(self):
        """Verifying a non-existent document reports exists=False."""
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.side_effect = Exception("Not found")
            response = client.get("/api/v1/verify/QmNonExistent")
            assert response.status_code == 200
            data = response.json()
            assert data["exists"] is False
            assert "error" in data

    def test_verify_document_public_access(self):
        """Verification is publicly accessible (no auth required)."""
        package = {
            "metadata": {"checksum": "abc"},
            "content": "test"
        }
        with patch("main.ipfs_cat", new_callable=AsyncMock) as mock_cat:
            mock_cat.return_value = json.dumps(package).encode()
            # No Authorization header!
            response = client.get("/api/v1/verify/QmTestCid123")
            assert response.status_code == 200
# ==================== Node Info Tests ====================
class TestNodeInfo:
    """Tests for node information."""

    def test_get_node_info_success(self):
        """Node information is assembled from the id and repo/stat replies."""
        id_response = {
            "ID": "QmNodeId12345",
            "ProtocolVersion": "ipfs/0.1.0",
            "AgentVersion": "kubo/0.24.0",
            "Addresses": ["/ip4/127.0.0.1/tcp/4001"]
        }
        stat_response = {
            "RepoSize": 1048576,
            "StorageMax": 10737418240,
            "NumObjects": 42
        }
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()

            # Route each mocked POST by URL to the matching canned reply.
            async def mock_post(url, **kwargs):
                mock_resp = MagicMock()
                if "id" in url:
                    mock_resp.status_code = 200
                    mock_resp.json.return_value = id_response
                elif "stat" in url:
                    mock_resp.status_code = 200
                    mock_resp.json.return_value = stat_response
                return mock_resp

            mock_instance.post = mock_post
            mock_client.return_value.__aenter__.return_value = mock_instance
            response = client.get("/api/v1/node/info")
            assert response.status_code == 200
            data = response.json()
            assert data["node_id"] == "QmNodeId12345"
            assert data["num_objects"] == 42

    def test_get_node_info_public_access(self):
        """Node info is publicly accessible (no auth required)."""
        with patch("main.httpx.AsyncClient") as mock_client:
            mock_instance = AsyncMock()
            mock_instance.post.return_value = MagicMock(
                status_code=200,
                json=lambda: {}
            )
            mock_client.return_value.__aenter__.return_value = mock_instance
            # No Authorization header!
            response = client.get("/api/v1/node/info")
            assert response.status_code == 200
# ==================== Model Tests ====================
class TestModels:
    """Tests for the Pydantic models."""

    def test_document_metadata_defaults(self):
        """DocumentMetadata default values."""
        metadata = DocumentMetadata(document_type="test")
        assert metadata.document_type == "test"
        assert metadata.document_id is None
        assert metadata.version is None
        assert metadata.language == "de"
        assert metadata.encrypted is False

    def test_document_metadata_all_fields(self):
        """DocumentMetadata with every field populated."""
        metadata = DocumentMetadata(
            document_type="legal_document",
            document_id="doc-123",
            version="1.0",
            language="en",
            created_at="2024-01-01T00:00:00",
            checksum="abc123",
            encrypted=True
        )
        assert metadata.document_type == "legal_document"
        assert metadata.document_id == "doc-123"
        assert metadata.version == "1.0"
        assert metadata.language == "en"
        assert metadata.encrypted is True

    def test_stored_document_model(self, sample_document_metadata):
        """StoredDocument model round-trip."""
        stored = StoredDocument(
            cid="QmTest123",
            size=1024,
            metadata=sample_document_metadata,
            gateway_url="http://localhost:8080/ipfs/QmTest123",
            timestamp="2024-01-01T00:00:00"
        )
        assert stored.cid == "QmTest123"
        assert stored.size == 1024
        assert stored.metadata.document_type == "legal_document"

    def test_document_list_model(self):
        """DocumentList model round-trip."""
        doc_list = DocumentList(
            documents=[{"cid": "Qm1"}, {"cid": "Qm2"}],
            total=2
        )
        assert doc_list.total == 2
        assert len(doc_list.documents) == 2
# ==================== Integration Tests ====================
class TestIntegration:
    """Integration tests (require a running IPFS node)."""

    @pytest.mark.skip(reason="Erfordert laufenden IPFS Node")
    def test_full_document_lifecycle(self, valid_auth_header):
        """Full document lifecycle: store, retrieve, verify, unpin."""
        # 1. Store the document.
        response = client.post(
            "/api/v1/documents",
            headers=valid_auth_header,
            files={"file": ("test.txt", b"Test content", "text/plain")}
        )
        assert response.status_code == 200
        cid = response.json()["cid"]
        # 2. Retrieve the document.
        response = client.get(
            f"/api/v1/documents/{cid}",
            headers=valid_auth_header
        )
        assert response.status_code == 200
        # 3. Verify integrity.
        response = client.get(f"/api/v1/verify/{cid}")
        assert response.status_code == 200
        assert response.json()["integrity_valid"] is True
        # 4. Unpin.
        response = client.delete(
            f"/api/v1/documents/{cid}",
            headers=valid_auth_header
        )
        assert response.status_code == 200
# ==================== Run Tests ====================
if __name__ == "__main__":
    # Allow running this test file directly: `python test_main.py`.
    pytest.main([__file__, "-v"])