refactor(consent-sdk,dsms-gateway): split ConsentManager, types, and main.py
- consent-sdk/src/types/index.ts: extracted 438 LOC into core.ts, config.ts, vendor.ts, api.ts, events.ts, storage.ts, translations.ts; index.ts is now a 21-LOC barrel re-exporter - consent-sdk/src/core/ConsentManager.ts: extracted normalizeConsentInput, isConsentExpired, needsConsent, ALL_CATEGORIES, MINIMAL_CATEGORIES into consent-manager-helpers.ts; reduced from 467 to 345 LOC - dsms-gateway/main.py: extracted models → models.py, config → config.py, IPFS helpers + verify_token → dependencies.py, route handlers → routers/documents.py and routers/node.py; main.py is now a 41-LOC app factory; test mock paths updated accordingly (27/27 tests pass) Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
256
dsms-gateway/routers/documents.py
Normal file
256
dsms-gateway/routers/documents.py
Normal file
@@ -0,0 +1,256 @@
|
||||
"""
|
||||
Documents router — handles /api/v1/documents and /api/v1/legal-documents endpoints.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import io
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from models import DocumentList, DocumentMetadata, StoredDocument
|
||||
from dependencies import verify_token, ipfs_add, ipfs_cat, ipfs_pin_ls
|
||||
from config import IPFS_API_URL, IPFS_GATEWAY_URL
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post("/api/v1/documents", response_model=StoredDocument)
async def store_document(
    file: UploadFile = File(...),
    document_type: str = "legal_document",
    document_id: Optional[str] = None,
    version: Optional[str] = None,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Store a document in the DSMS.

    - **file**: the document to store
    - **document_type**: document type (legal_document, consent_record, audit_log)
    - **document_id**: optional document ID
    - **version**: optional version number
    - **language**: language (default: de)
    """
    raw = await file.read()

    # Build the metadata record; the SHA-256 checksum pins the exact bytes.
    doc_meta = DocumentMetadata(
        document_type=document_type,
        document_id=document_id,
        version=version,
        language=language,
        created_at=datetime.utcnow().isoformat(),
        checksum=hashlib.sha256(raw).hexdigest(),
        encrypted=False
    )

    # Wrap content + metadata into one JSON package for IPFS.
    # NOTE(review): payload is hex-encoded despite the "content_base64" key
    # name — kept as-is for compatibility with already-stored packages.
    envelope = json.dumps({
        "metadata": doc_meta.model_dump(),
        "content_base64": raw.hex(),
        "filename": file.filename
    }).encode()

    added = await ipfs_add(envelope)
    cid = added.get("Hash")

    return StoredDocument(
        cid=cid,
        size=int(added.get("Size", 0)),
        metadata=doc_meta,
        gateway_url=f"{IPFS_GATEWAY_URL}/ipfs/{cid}",
        timestamp=datetime.utcnow().isoformat()
    )
|
||||
|
||||
|
||||
@router.get("/api/v1/documents/{cid}")
async def get_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Retrieve a document from the DSMS.

    - **cid**: Content Identifier (IPFS hash)

    If the stored object is a DSMS package, the original payload is
    unwrapped and streamed as a download with the stored filename and
    X-DSMS-* metadata headers; otherwise the raw IPFS content is
    streamed unchanged.
    """
    content = await ipfs_cat(cid)

    try:
        package = json.loads(content)
    except json.JSONDecodeError:
        # Not a DSMS package — return the raw content as-is.
        return StreamingResponse(
            io.BytesIO(content),
            media_type="application/octet-stream"
        )

    metadata = package.get("metadata", {})
    # Payload is hex-encoded despite the legacy "content_base64" key name.
    original_content = bytes.fromhex(package.get("content_base64", ""))
    filename = package.get("filename", "document")

    return StreamingResponse(
        io.BytesIO(original_content),
        media_type="application/octet-stream",
        headers={
            # BUG FIX: the stored filename was extracted but never used —
            # the header previously contained a hard-coded placeholder.
            "Content-Disposition": f'attachment; filename="{filename}"',
            "X-DSMS-Document-Type": metadata.get("document_type", "unknown"),
            "X-DSMS-Checksum": metadata.get("checksum", ""),
            "X-DSMS-Created-At": metadata.get("created_at", "")
        }
    )
|
||||
|
||||
|
||||
@router.get("/api/v1/documents/{cid}/metadata")
async def get_document_metadata(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Retrieve only the metadata of a stored document.

    - **cid**: Content Identifier (IPFS hash)

    Returns the DSMS package metadata, filename and decoded payload size.
    Non-JSON objects fall back to reporting the raw object size.
    """
    content = await ipfs_cat(cid)

    try:
        package = json.loads(content)
        return {
            "cid": cid,
            "metadata": package.get("metadata", {}),
            "filename": package.get("filename"),
            # Hex encoding uses two characters per payload byte, so the
            # size can be derived without decoding. This also avoids the
            # uncaught ValueError (-> HTTP 500) that bytes.fromhex raised
            # on malformed hex payloads.
            "size": len(package.get("content_base64", "")) // 2
        }
    except json.JSONDecodeError:
        # Not a DSMS package — report the raw object size instead.
        return {
            "cid": cid,
            "metadata": {},
            "raw_size": len(content)
        }
|
||||
|
||||
|
||||
@router.get("/api/v1/documents", response_model=DocumentList)
async def list_documents(
    _auth: dict = Depends(verify_token)
):
    """
    List all stored documents.

    Inspects pinned CIDs (capped at 100 for performance) and returns
    those that parse as DSMS packages; other pinned objects are skipped.
    """
    pinned = await ipfs_pin_ls()

    found = []
    for pinned_cid in pinned[:100]:  # cap at 100 for performance
        try:
            parsed = json.loads(await ipfs_cat(pinned_cid))
            entry = {
                "cid": pinned_cid,
                "metadata": parsed.get("metadata", {}),
                "filename": parsed.get("filename")
            }
        except Exception:
            # Skip objects that are not DSMS packages.
            continue
        found.append(entry)

    return DocumentList(documents=found, total=len(found))
|
||||
|
||||
|
||||
@router.delete("/api/v1/documents/{cid}")
async def unpin_document(
    cid: str,
    _auth: dict = Depends(verify_token)
):
    """
    Remove a document from the local pin set.

    The document remains in the network but will be reclaimed by the
    next garbage collection run.

    - **cid**: Content Identifier (IPFS hash)
    """
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(
            f"{IPFS_API_URL}/api/v0/pin/rm",
            params={"arg": cid}
        )

    if resp.status_code != 200:
        raise HTTPException(
            status_code=404,
            detail=f"Konnte Pin nicht entfernen: {cid}"
        )

    return {
        "status": "unpinned",
        "cid": cid,
        "message": "Dokument wird bei nächster Garbage Collection entfernt"
    }
|
||||
|
||||
|
||||
@router.post("/api/v1/legal-documents/archive")
async def archive_legal_document(
    document_id: str,
    version: str,
    content: str,
    language: str = "de",
    _auth: dict = Depends(verify_token)
):
    """
    Permanently archive a legal document version.

    Intended for terms of service, privacy policy, etc.

    - **document_id**: ID of the legal document
    - **version**: version number
    - **content**: HTML/Markdown body
    - **language**: language
    """
    encoded = content.encode('utf-8')
    digest = hashlib.sha256(encoded).hexdigest()

    # Metadata describing this archived revision.
    meta = {
        "document_type": "legal_document",
        "document_id": document_id,
        "version": version,
        "language": language,
        "created_at": datetime.utcnow().isoformat(),
        "checksum": digest,
        "content_type": "text/html"
    }

    # Package the revision and push it to IPFS.
    payload = json.dumps(
        {
            "metadata": meta,
            "content": content,
            "archived_at": datetime.utcnow().isoformat()
        },
        ensure_ascii=False
    ).encode('utf-8')

    added = await ipfs_add(payload)
    archived_cid = added.get("Hash")

    return {
        "cid": archived_cid,
        "document_id": document_id,
        "version": version,
        "checksum": digest,
        "archived_at": datetime.utcnow().isoformat(),
        "verification_url": f"{IPFS_GATEWAY_URL}/ipfs/{archived_cid}"
    }
|
||||
Reference in New Issue
Block a user