Restructure: Move 43 files into 8 domain packages (backend-lehrer)
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 27s
CI / test-go-edu-search (push) Successful in 40s
CI / test-python-klausur (push) Failing after 2m30s
CI / test-python-agent-core (push) Successful in 28s
CI / test-nodejs-website (push) Successful in 20s
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
@@ -0,0 +1 @@
# abitur — Abitur document management (exam docs, recognition).
@@ -0,0 +1,413 @@
"""
Abitur Document Store API - management of Abitur exam tasks and Erwartungshorizonte (marking guides).

Supports:
- Bundesland-specific documents
- Subject (Fach), year, level (eA/gA), task number
- AI-based document recognition
- RAG integration with the vector store

Filename schema (NiBiS, Niedersachsen):
- 2025_Deutsch_eA_I.pdf - exam task
- 2025_Deutsch_eA_I_EWH.pdf - Erwartungshorizont
"""

import logging
import uuid
import os
import zipfile
import tempfile
from datetime import datetime
from typing import List, Optional, Dict, Any
from pathlib import Path

from fastapi import APIRouter, HTTPException, UploadFile, File, Form, BackgroundTasks
from fastapi.responses import FileResponse

from .models import (
    Bundesland, Fach, Niveau, DokumentTyp, VerarbeitungsStatus,
    DokumentCreate, DokumentUpdate, DokumentResponse, ImportResult,
    RecognitionResult, AbiturDokument,
    FACH_LABELS, DOKUMENT_TYP_LABELS,
    # Backwards-compatibility re-exports
    AbiturFach, Anforderungsniveau, DocumentMetadata, AbiturDokumentCompat,
)
from .recognition import parse_nibis_filename, to_dokument_response

logger = logging.getLogger(__name__)

router = APIRouter(
    prefix="/abitur-docs",
    tags=["abitur-docs"],
)

# Storage directory
DOCS_DIR = Path("/tmp/abitur-docs")
DOCS_DIR.mkdir(parents=True, exist_ok=True)

# In-memory storage
_dokumente: Dict[str, AbiturDokument] = {}

# Backwards-compatibility alias
documents_db = _dokumente


# ============================================================================
# Private helper (kept local since it references module-level _dokumente)
# ============================================================================

def _to_dokument_response(doc: AbiturDokument) -> DokumentResponse:
    return to_dokument_response(doc)


# ============================================================================
# API Endpoints - Documents
# ============================================================================

@router.post("/upload", response_model=DokumentResponse)
async def upload_dokument(
    file: UploadFile = File(...),
    bundesland: Optional[Bundesland] = Form(None),
    fach: Optional[Fach] = Form(None),
    jahr: Optional[int] = Form(None),
    niveau: Optional[Niveau] = Form(None),
    typ: Optional[DokumentTyp] = Form(None),
    aufgaben_nummer: Optional[str] = Form(None)
):
    """Uploads a single document."""
    if not file.filename:
        raise HTTPException(status_code=400, detail="Kein Dateiname")

    recognition = parse_nibis_filename(file.filename)

    final_bundesland = bundesland or recognition.bundesland or Bundesland.NIEDERSACHSEN
    final_fach = fach or recognition.fach
    final_jahr = jahr or recognition.jahr or datetime.now().year
    final_niveau = niveau or recognition.niveau or Niveau.EA
    final_typ = typ or recognition.typ or DokumentTyp.AUFGABE
    final_aufgabe = aufgaben_nummer or recognition.aufgaben_nummer

    if not final_fach:
        raise HTTPException(status_code=400, detail="Fach konnte nicht erkannt werden")

    doc_id = str(uuid.uuid4())
    file_ext = Path(file.filename).suffix
    safe_filename = f"{doc_id}{file_ext}"
    file_path = DOCS_DIR / safe_filename

    content = await file.read()
    with open(file_path, "wb") as f:
        f.write(content)

    now = datetime.utcnow()
    dokument = AbiturDokument(
        id=doc_id, dateiname=safe_filename, original_dateiname=file.filename,
        bundesland=final_bundesland, fach=final_fach, jahr=final_jahr,
        niveau=final_niveau, typ=final_typ, aufgaben_nummer=final_aufgabe,
        status=VerarbeitungsStatus.RECOGNIZED if recognition.success else VerarbeitungsStatus.PENDING,
        confidence=recognition.confidence, file_path=str(file_path), file_size=len(content),
        indexed=False, vector_ids=[], created_at=now, updated_at=now
    )
    _dokumente[doc_id] = dokument
    logger.info(f"Uploaded document {doc_id}: {file.filename}")
    return _to_dokument_response(dokument)


@router.post("/import-zip", response_model=ImportResult)
async def import_zip(
    file: UploadFile = File(...),
    bundesland: Bundesland = Form(Bundesland.NIEDERSACHSEN),
    background_tasks: BackgroundTasks = None
):
    """Imports all PDFs from a ZIP file."""
    if not file.filename or not file.filename.endswith(".zip"):
        raise HTTPException(status_code=400, detail="ZIP-Datei erforderlich")

    with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp:
        content = await file.read()
        tmp.write(content)
        tmp_path = tmp.name

    documents = []
    total = 0
    recognized = 0
    errors = 0

    try:
        with zipfile.ZipFile(tmp_path, 'r') as zip_ref:
            for zip_info in zip_ref.infolist():
                if not zip_info.filename.lower().endswith(".pdf"):
                    continue
                if "__MACOSX" in zip_info.filename or zip_info.filename.startswith("."):
                    continue
                if "thumbs.db" in zip_info.filename.lower():
                    continue

                total += 1
                try:
                    basename = Path(zip_info.filename).name
                    recognition = parse_nibis_filename(basename)
                    if not recognition.fach:
                        errors += 1
                        logger.warning(f"Konnte Fach nicht erkennen: {basename}")
                        continue

                    doc_id = str(uuid.uuid4())
                    file_ext = Path(basename).suffix
                    safe_filename = f"{doc_id}{file_ext}"
                    file_path = DOCS_DIR / safe_filename

                    with zip_ref.open(zip_info.filename) as source:
                        file_content = source.read()
                        with open(file_path, "wb") as target:
                            target.write(file_content)

                    now = datetime.utcnow()
                    dokument = AbiturDokument(
                        id=doc_id, dateiname=safe_filename, original_dateiname=basename,
                        bundesland=bundesland, fach=recognition.fach,
                        jahr=recognition.jahr or datetime.now().year,
                        niveau=recognition.niveau or Niveau.EA,
                        typ=recognition.typ or DokumentTyp.AUFGABE,
                        aufgaben_nummer=recognition.aufgaben_nummer,
                        status=VerarbeitungsStatus.RECOGNIZED, confidence=recognition.confidence,
                        file_path=str(file_path), file_size=len(file_content),
                        indexed=False, vector_ids=[], created_at=now, updated_at=now
                    )
                    _dokumente[doc_id] = dokument
                    documents.append(_to_dokument_response(dokument))
                    recognized += 1
                except Exception as e:
                    errors += 1
                    logger.error(f"Fehler bei {zip_info.filename}: {e}")
    finally:
        os.unlink(tmp_path)

    logger.info(f"ZIP-Import: {recognized}/{total} erkannt, {errors} Fehler")
    return ImportResult(total_files=total, recognized=recognized, errors=errors, documents=documents)


@router.get("/", response_model=List[DokumentResponse])
async def list_dokumente(
    bundesland: Optional[Bundesland] = None, fach: Optional[Fach] = None,
    jahr: Optional[int] = None, niveau: Optional[Niveau] = None,
    typ: Optional[DokumentTyp] = None, status: Optional[VerarbeitungsStatus] = None,
    indexed: Optional[bool] = None
):
    """Lists documents with optional filters."""
    docs = list(_dokumente.values())
    if bundesland:
        docs = [d for d in docs if d.bundesland == bundesland]
    if fach:
        docs = [d for d in docs if d.fach == fach]
    if jahr:
        docs = [d for d in docs if d.jahr == jahr]
    if niveau:
        docs = [d for d in docs if d.niveau == niveau]
    if typ:
        docs = [d for d in docs if d.typ == typ]
    if status:
        docs = [d for d in docs if d.status == status]
    if indexed is not None:
        docs = [d for d in docs if d.indexed == indexed]
    docs.sort(key=lambda x: (x.jahr, x.fach.value, x.niveau.value), reverse=True)
    return [_to_dokument_response(d) for d in docs]


@router.get("/{doc_id}", response_model=DokumentResponse)
async def get_dokument(doc_id: str):
    """Retrieves a single document."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    return _to_dokument_response(doc)


@router.put("/{doc_id}", response_model=DokumentResponse)
async def update_dokument(doc_id: str, data: DokumentUpdate):
    """Updates document metadata."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    if data.bundesland is not None:
        doc.bundesland = data.bundesland
    if data.fach is not None:
        doc.fach = data.fach
    if data.jahr is not None:
        doc.jahr = data.jahr
    if data.niveau is not None:
        doc.niveau = data.niveau
    if data.typ is not None:
        doc.typ = data.typ
    if data.aufgaben_nummer is not None:
        doc.aufgaben_nummer = data.aufgaben_nummer
    if data.status is not None:
        doc.status = data.status
    doc.updated_at = datetime.utcnow()
    return _to_dokument_response(doc)


@router.post("/{doc_id}/confirm", response_model=DokumentResponse)
async def confirm_dokument(doc_id: str):
    """Confirms recognized metadata."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    doc.status = VerarbeitungsStatus.CONFIRMED
    doc.updated_at = datetime.utcnow()
    return _to_dokument_response(doc)


@router.post("/{doc_id}/index", response_model=DokumentResponse)
async def index_dokument(doc_id: str):
    """Indexes the document in the vector store."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    if doc.status not in [VerarbeitungsStatus.CONFIRMED, VerarbeitungsStatus.RECOGNIZED]:
        raise HTTPException(status_code=400, detail="Dokument muss erst bestätigt werden")
    doc.indexed = True
    doc.vector_ids = [f"vec_{doc_id}_{i}" for i in range(3)]
    doc.status = VerarbeitungsStatus.INDEXED
    doc.updated_at = datetime.utcnow()
    logger.info(f"Document {doc_id} indexed (demo)")
    return _to_dokument_response(doc)


@router.delete("/{doc_id}")
async def delete_dokument(doc_id: str):
    """Deletes a document."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    if os.path.exists(doc.file_path):
        os.remove(doc.file_path)
    del _dokumente[doc_id]
    return {"status": "deleted", "id": doc_id}


@router.get("/{doc_id}/download")
async def download_dokument(doc_id: str):
    """Downloads the stored document file."""
    doc = _dokumente.get(doc_id)
    if not doc:
        raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
    if not os.path.exists(doc.file_path):
        raise HTTPException(status_code=404, detail="Datei nicht gefunden")
    return FileResponse(doc.file_path, filename=doc.original_dateiname, media_type="application/pdf")


@router.post("/recognize", response_model=RecognitionResult)
async def recognize_filename(filename: str):
    """Recognizes metadata from a filename."""
    return parse_nibis_filename(filename)


@router.post("/bulk-confirm")
async def bulk_confirm(doc_ids: List[str]):
    """Confirms several documents at once."""
    confirmed = 0
    for doc_id in doc_ids:
        doc = _dokumente.get(doc_id)
        if doc and doc.status == VerarbeitungsStatus.RECOGNIZED:
            doc.status = VerarbeitungsStatus.CONFIRMED
            doc.updated_at = datetime.utcnow()
            confirmed += 1
    return {"confirmed": confirmed, "total": len(doc_ids)}


@router.post("/bulk-index")
async def bulk_index(doc_ids: List[str]):
    """Indexes several documents at once."""
    indexed = 0
    for doc_id in doc_ids:
        doc = _dokumente.get(doc_id)
        if doc and doc.status in [VerarbeitungsStatus.CONFIRMED, VerarbeitungsStatus.RECOGNIZED]:
            doc.indexed = True
            doc.vector_ids = [f"vec_{doc_id}_{i}" for i in range(3)]
            doc.status = VerarbeitungsStatus.INDEXED
            doc.updated_at = datetime.utcnow()
            indexed += 1
    return {"indexed": indexed, "total": len(doc_ids)}


@router.get("/stats/overview")
async def get_stats_overview():
    """Returns an overview of all documents."""
    docs = list(_dokumente.values())
    by_bundesland: Dict[str, int] = {}
    by_fach: Dict[str, int] = {}
    by_jahr: Dict[int, int] = {}
    by_status: Dict[str, int] = {}
    for doc in docs:
        by_bundesland[doc.bundesland.value] = by_bundesland.get(doc.bundesland.value, 0) + 1
        by_fach[doc.fach.value] = by_fach.get(doc.fach.value, 0) + 1
        by_jahr[doc.jahr] = by_jahr.get(doc.jahr, 0) + 1
        by_status[doc.status.value] = by_status.get(doc.status.value, 0) + 1
    return {
        "total": len(docs), "indexed": sum(1 for d in docs if d.indexed),
        "pending": sum(1 for d in docs if d.status == VerarbeitungsStatus.PENDING),
        "by_bundesland": by_bundesland, "by_fach": by_fach, "by_jahr": by_jahr, "by_status": by_status
    }


@router.get("/search", response_model=List[DokumentResponse])
async def search_dokumente(
    bundesland: Bundesland, fach: Fach, jahr: Optional[int] = None,
    niveau: Optional[Niveau] = None, nur_indexed: bool = True
):
    """Searches documents for Klausur correction."""
    docs = [d for d in _dokumente.values() if d.bundesland == bundesland and d.fach == fach]
    if jahr:
        docs = [d for d in docs if d.jahr == jahr]
    if niveau:
        docs = [d for d in docs if d.niveau == niveau]
    if nur_indexed:
        docs = [d for d in docs if d.indexed]

    aufgaben = [d for d in docs if d.typ == DokumentTyp.AUFGABE]
    ewh = [d for d in docs if d.typ == DokumentTyp.ERWARTUNGSHORIZONT]
    andere = [d for d in docs if d.typ not in [DokumentTyp.AUFGABE, DokumentTyp.ERWARTUNGSHORIZONT]]

    result = []
    for aufgabe in aufgaben:
        result.append(_to_dokument_response(aufgabe))
        matching_ewh = next(
            (e for e in ewh if e.jahr == aufgabe.jahr and e.niveau == aufgabe.niveau
             and e.aufgaben_nummer == aufgabe.aufgaben_nummer), None
        )
        if matching_ewh:
            result.append(_to_dokument_response(matching_ewh))
    for e in ewh:
        if _to_dokument_response(e) not in result:
            result.append(_to_dokument_response(e))
    for a in andere:
        result.append(_to_dokument_response(a))
    return result


@router.get("/enums/bundeslaender")
async def get_bundeslaender():
    """Returns all Bundesländer."""
    return [{"value": b.value, "label": b.value.replace("_", " ").title()} for b in Bundesland]


@router.get("/enums/faecher")
async def get_faecher():
    """Returns all subjects (Fächer)."""
    return [{"value": f.value, "label": FACH_LABELS.get(f, f.value)} for f in Fach]


@router.get("/enums/niveaus")
async def get_niveaus():
    """Returns all levels (Niveaus)."""
    return [
        {"value": "eA", "label": "eA (erhöhtes Anforderungsniveau)"},
        {"value": "gA", "label": "gA (grundlegendes Anforderungsniveau)"}
    ]


@router.get("/enums/typen")
async def get_typen():
    """Returns all document types."""
    return [{"value": t.value, "label": DOKUMENT_TYP_LABELS.get(t, t.value)} for t in DokumentTyp]
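
A minimal usage sketch for the router above (not part of the commit; it assumes the new abitur package is importable and FastAPI's multipart dependencies are installed, and the stub PDF bytes are hypothetical):

    from fastapi import FastAPI
    from fastapi.testclient import TestClient

    from abitur.api import router

    app = FastAPI()
    app.include_router(router)  # exposes the /abitur-docs/... endpoints

    client = TestClient(app)
    # Metadata is parsed from the NiBiS-style filename; the optional form fields can override it.
    resp = client.post(
        "/abitur-docs/upload",
        files={"file": ("2025_Deutsch_eA_I.pdf", b"%PDF-1.4 stub", "application/pdf")},
    )
    print(resp.status_code, resp.json()["fach"], resp.json()["jahr"])  # expected: 200 deutsch 2025
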
@@ -0,0 +1,327 @@
"""
Abitur Document Store - Enums, Pydantic Models, Data Classes.

Shared types for abitur_docs_api and abitur_docs_recognition.
"""

from datetime import datetime
from typing import List, Dict, Any, Optional
from enum import Enum
from dataclasses import dataclass

from pydantic import BaseModel, Field


# ============================================================================
# Enums
# ============================================================================

class Bundesland(str, Enum):
    """Bundesländer with a centralized Abitur."""
    NIEDERSACHSEN = "niedersachsen"
    BAYERN = "bayern"
    BADEN_WUERTTEMBERG = "baden_wuerttemberg"
    NORDRHEIN_WESTFALEN = "nordrhein_westfalen"
    HESSEN = "hessen"
    SACHSEN = "sachsen"
    THUERINGEN = "thueringen"
    BERLIN = "berlin"
    HAMBURG = "hamburg"
    SCHLESWIG_HOLSTEIN = "schleswig_holstein"
    BREMEN = "bremen"
    BRANDENBURG = "brandenburg"
    MECKLENBURG_VORPOMMERN = "mecklenburg_vorpommern"
    SACHSEN_ANHALT = "sachsen_anhalt"
    RHEINLAND_PFALZ = "rheinland_pfalz"
    SAARLAND = "saarland"


class Fach(str, Enum):
    """Abitur subjects."""
    DEUTSCH = "deutsch"
    ENGLISCH = "englisch"
    MATHEMATIK = "mathematik"
    BIOLOGIE = "biologie"
    CHEMIE = "chemie"
    PHYSIK = "physik"
    GESCHICHTE = "geschichte"
    ERDKUNDE = "erdkunde"
    POLITIK_WIRTSCHAFT = "politik_wirtschaft"
    FRANZOESISCH = "franzoesisch"
    SPANISCH = "spanisch"
    LATEIN = "latein"
    GRIECHISCH = "griechisch"
    KUNST = "kunst"
    MUSIK = "musik"
    SPORT = "sport"
    INFORMATIK = "informatik"
    EV_RELIGION = "ev_religion"
    KATH_RELIGION = "kath_religion"
    WERTE_NORMEN = "werte_normen"
    BRC = "brc"
    BVW = "bvw"
    ERNAEHRUNG = "ernaehrung"
    MECHATRONIK = "mechatronik"
    GESUNDHEIT_PFLEGE = "gesundheit_pflege"
    PAEDAGOGIK_PSYCHOLOGIE = "paedagogik_psychologie"


class Niveau(str, Enum):
    """Requirement level (Anforderungsniveau)."""
    EA = "eA"
    GA = "gA"


class DokumentTyp(str, Enum):
    """Document type."""
    AUFGABE = "aufgabe"
    ERWARTUNGSHORIZONT = "erwartungshorizont"
    DECKBLATT = "deckblatt"
    MATERIAL = "material"
    HOERVERSTEHEN = "hoerverstehen"
    SPRACHMITTLUNG = "sprachmittlung"
    BEWERTUNGSBOGEN = "bewertungsbogen"


class VerarbeitungsStatus(str, Enum):
    """Document processing status."""
    PENDING = "pending"
    PROCESSING = "processing"
    RECOGNIZED = "recognized"
    CONFIRMED = "confirmed"
    INDEXED = "indexed"
    ERROR = "error"


# ============================================================================
# Fach mapping for filenames
# ============================================================================

FACH_NAME_MAPPING = {
    "deutsch": Fach.DEUTSCH,
    "englisch": Fach.ENGLISCH,
    "mathe": Fach.MATHEMATIK,
    "mathematik": Fach.MATHEMATIK,
    "biologie": Fach.BIOLOGIE,
    "bio": Fach.BIOLOGIE,
    "chemie": Fach.CHEMIE,
    "physik": Fach.PHYSIK,
    "geschichte": Fach.GESCHICHTE,
    "erdkunde": Fach.ERDKUNDE,
    "geographie": Fach.ERDKUNDE,
    "politikwirtschaft": Fach.POLITIK_WIRTSCHAFT,
    "politik": Fach.POLITIK_WIRTSCHAFT,
    "franzoesisch": Fach.FRANZOESISCH,
    "franz": Fach.FRANZOESISCH,
    "spanisch": Fach.SPANISCH,
    "latein": Fach.LATEIN,
    "griechisch": Fach.GRIECHISCH,
    "kunst": Fach.KUNST,
    "musik": Fach.MUSIK,
    "sport": Fach.SPORT,
    "informatik": Fach.INFORMATIK,
    "evreligion": Fach.EV_RELIGION,
    "kathreligion": Fach.KATH_RELIGION,
    "wertenormen": Fach.WERTE_NORMEN,
    "brc": Fach.BRC,
    "bvw": Fach.BVW,
    "ernaehrung": Fach.ERNAEHRUNG,
    "mecha": Fach.MECHATRONIK,
    "mechatronik": Fach.MECHATRONIK,
    "technikmecha": Fach.MECHATRONIK,
    "gespfl": Fach.GESUNDHEIT_PFLEGE,
    "paedpsych": Fach.PAEDAGOGIK_PSYCHOLOGIE,
}


# ============================================================================
# Pydantic Models
# ============================================================================

class DokumentCreate(BaseModel):
    """Manual creation of a document."""
    bundesland: Bundesland
    fach: Fach
    jahr: int = Field(ge=2000, le=2100)
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str] = None


class DokumentUpdate(BaseModel):
    """Update for recognized metadata."""
    bundesland: Optional[Bundesland] = None
    fach: Optional[Fach] = None
    jahr: Optional[int] = None
    niveau: Optional[Niveau] = None
    typ: Optional[DokumentTyp] = None
    aufgaben_nummer: Optional[str] = None
    status: Optional[VerarbeitungsStatus] = None


class DokumentResponse(BaseModel):
    """Response model for a document."""
    id: str
    dateiname: str
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime


class ImportResult(BaseModel):
    """Result of a ZIP import."""
    total_files: int
    recognized: int
    errors: int
    documents: List[DokumentResponse]


class RecognitionResult(BaseModel):
    """Result of document recognition."""
    success: bool
    bundesland: Optional[Bundesland]
    fach: Optional[Fach]
    jahr: Optional[int]
    niveau: Optional[Niveau]
    typ: Optional[DokumentTyp]
    aufgaben_nummer: Optional[str]
    confidence: float
    raw_filename: str
    suggestions: List[Dict[str, Any]]

    @property
    def extracted(self) -> Dict[str, Any]:
        """Backwards-compatible property returning extracted values as dict."""
        result = {}
        if self.bundesland:
            result["bundesland"] = self.bundesland.value
        if self.fach:
            result["fach"] = self.fach.value
        if self.jahr:
            result["jahr"] = self.jahr
        if self.niveau:
            result["niveau"] = self.niveau.value
        if self.typ:
            result["typ"] = self.typ.value
        if self.aufgaben_nummer:
            result["aufgaben_nummer"] = self.aufgaben_nummer
        return result

    @property
    def method(self) -> str:
        """Backwards-compatible property for recognition method."""
        return "filename_pattern"


# ============================================================================
# Internal Data Classes
# ============================================================================

@dataclass
class AbiturDokument:
    """Internal document record."""
    id: str
    dateiname: str
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime


# ============================================================================
# Backwards-compatibility aliases (used by tests)
# ============================================================================
AbiturFach = Fach
Anforderungsniveau = Niveau


class DocumentMetadata(BaseModel):
    """Backwards-compatible metadata model for tests."""
    jahr: Optional[int] = None
    bundesland: Optional[str] = None
    fach: Optional[str] = None
    niveau: Optional[str] = None
    dokument_typ: Optional[str] = None
    aufgaben_nummer: Optional[str] = None


class AbiturDokumentCompat(BaseModel):
    """Backwards-compatible AbiturDokument model for tests."""
    id: str
    filename: str
    file_path: str
    metadata: DocumentMetadata
    status: VerarbeitungsStatus
    recognition_result: Optional[RecognitionResult] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        arbitrary_types_allowed = True


# ============================================================================
# Fach labels (for the frontend enum endpoint)
# ============================================================================

FACH_LABELS = {
    Fach.DEUTSCH: "Deutsch",
    Fach.ENGLISCH: "Englisch",
    Fach.MATHEMATIK: "Mathematik",
    Fach.BIOLOGIE: "Biologie",
    Fach.CHEMIE: "Chemie",
    Fach.PHYSIK: "Physik",
    Fach.GESCHICHTE: "Geschichte",
    Fach.ERDKUNDE: "Erdkunde",
    Fach.POLITIK_WIRTSCHAFT: "Politik-Wirtschaft",
    Fach.FRANZOESISCH: "Französisch",
    Fach.SPANISCH: "Spanisch",
    Fach.LATEIN: "Latein",
    Fach.GRIECHISCH: "Griechisch",
    Fach.KUNST: "Kunst",
    Fach.MUSIK: "Musik",
    Fach.SPORT: "Sport",
    Fach.INFORMATIK: "Informatik",
    Fach.EV_RELIGION: "Ev. Religion",
    Fach.KATH_RELIGION: "Kath. Religion",
    Fach.WERTE_NORMEN: "Werte und Normen",
    Fach.BRC: "BRC (Betriebswirtschaft)",
    Fach.BVW: "BVW (Volkswirtschaft)",
    Fach.ERNAEHRUNG: "Ernährung",
    Fach.MECHATRONIK: "Mechatronik",
    Fach.GESUNDHEIT_PFLEGE: "Gesundheit-Pflege",
    Fach.PAEDAGOGIK_PSYCHOLOGIE: "Pädagogik-Psychologie",
}

DOKUMENT_TYP_LABELS = {
    DokumentTyp.AUFGABE: "Aufgabe",
    DokumentTyp.ERWARTUNGSHORIZONT: "Erwartungshorizont",
    DokumentTyp.DECKBLATT: "Deckblatt",
    DokumentTyp.MATERIAL: "Material",
    DokumentTyp.HOERVERSTEHEN: "Hörverstehen",
    DokumentTyp.SPRACHMITTLUNG: "Sprachmittlung",
    DokumentTyp.BEWERTUNGSBOGEN: "Bewertungsbogen",
}
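
Since the enums subclass str, their members compare equal to their serialized values, and the extracted property flattens recognized fields to plain values. A short sketch of both behaviors (not part of the commit; assumes the package layout introduced here):

    from abitur.models import Bundesland, Fach, Niveau, RecognitionResult

    assert Fach.DEUTSCH == "deutsch" and Niveau.EA.value == "eA"

    r = RecognitionResult(
        success=True, bundesland=Bundesland.NIEDERSACHSEN, fach=Fach.DEUTSCH,
        jahr=2025, niveau=Niveau.EA, typ=None, aufgaben_nummer="I",
        confidence=0.8, raw_filename="2025_Deutsch_eA_I.pdf", suggestions=[],
    )
    # Unset fields (here: typ) are omitted from the dict.
    print(r.extracted)  # {'bundesland': 'niedersachsen', 'fach': 'deutsch', 'jahr': 2025, 'niveau': 'eA', 'aufgaben_nummer': 'I'}
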
@@ -0,0 +1,124 @@
"""
Abitur Document Store - filename recognition and helpers.

Recognizes metadata from NiBiS filenames (Niedersachsen).
"""

import re
from typing import Dict, Any
from pathlib import Path

from .models import (
    Bundesland, Fach, Niveau, DokumentTyp, VerarbeitungsStatus,
    RecognitionResult, AbiturDokument, DokumentResponse,
    FACH_NAME_MAPPING,
)


def parse_nibis_filename(filename: str) -> RecognitionResult:
    """
    Recognizes metadata from a NiBiS filename.

    Examples:
    - 2025_Deutsch_eA_I.pdf
    - 2025_Deutsch_eA_I_EWH.pdf
    - 2025_Biologie_gA_1.pdf
    - 2025_Englisch_eA_HV.pdf (Hörverstehen)
    """
    result = RecognitionResult(
        success=False,
        bundesland=Bundesland.NIEDERSACHSEN,
        fach=None,
        jahr=None,
        niveau=None,
        typ=None,
        aufgaben_nummer=None,
        confidence=0.0,
        raw_filename=filename,
        suggestions=[]
    )

    # Clean up the filename
    name = Path(filename).stem.lower()

    # Extract the year (four leading digits)
    jahr_match = re.match(r'^(\d{4})', name)
    if jahr_match:
        result.jahr = int(jahr_match.group(1))
        result.confidence += 0.2

    # Extract the subject (Fach)
    for fach_key, fach_enum in FACH_NAME_MAPPING.items():
        if fach_key in name.replace("_", "").replace("-", ""):
            result.fach = fach_enum
            result.confidence += 0.3
            break

    # Extract the level (eA/gA)
    if "_ea" in name or "_ea_" in name or "ea_" in name:
        result.niveau = Niveau.EA
        result.confidence += 0.2
    elif "_ga" in name or "_ga_" in name or "ga_" in name:
        result.niveau = Niveau.GA
        result.confidence += 0.2

    # Extract the document type
    if "_ewh" in name:
        result.typ = DokumentTyp.ERWARTUNGSHORIZONT
        result.confidence += 0.2
    elif "_hv" in name or "hoerverstehen" in name:
        result.typ = DokumentTyp.HOERVERSTEHEN
        result.confidence += 0.15
    elif "_sm" in name or "_me" in name or "sprachmittlung" in name:
        result.typ = DokumentTyp.SPRACHMITTLUNG
        result.confidence += 0.15
    elif "deckblatt" in name:
        result.typ = DokumentTyp.DECKBLATT
        result.confidence += 0.15
    elif "material" in name:
        result.typ = DokumentTyp.MATERIAL
        result.confidence += 0.15
    elif "bewertung" in name:
        result.typ = DokumentTyp.BEWERTUNGSBOGEN
        result.confidence += 0.15
    else:
        result.typ = DokumentTyp.AUFGABE
        result.confidence += 0.1

    # Extract the task number (Roman or Arabic)
    aufgabe_match = re.search(r'_([ivx]+|[1-4][abc]?)(?:_|\.pdf|$)', name, re.IGNORECASE)
    if aufgabe_match:
        result.aufgaben_nummer = aufgabe_match.group(1).upper()
        result.confidence += 0.1

    # Success if at least Fach and year were recognized
    if result.fach and result.jahr:
        result.success = True

    # Cap confidence at 1.0
    result.confidence = min(result.confidence, 1.0)

    return result


def to_dokument_response(doc: AbiturDokument) -> DokumentResponse:
    """Converts the internal document to a response model."""
    return DokumentResponse(
        id=doc.id,
        dateiname=doc.dateiname,
        original_dateiname=doc.original_dateiname,
        bundesland=doc.bundesland,
        fach=doc.fach,
        jahr=doc.jahr,
        niveau=doc.niveau,
        typ=doc.typ,
        aufgaben_nummer=doc.aufgaben_nummer,
        status=doc.status,
        confidence=doc.confidence,
        file_path=doc.file_path,
        file_size=doc.file_size,
        indexed=doc.indexed,
        vector_ids=doc.vector_ids,
        created_at=doc.created_at,
        updated_at=doc.updated_at
    )
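
Walking the parser through the second docstring example (a sketch, not part of the commit; the expected values follow directly from the scoring rules above):

    from abitur.recognition import parse_nibis_filename

    r = parse_nibis_filename("2025_Deutsch_eA_I_EWH.pdf")
    assert r.jahr == 2025                       # +0.2 from the leading digits
    assert r.fach.value == "deutsch"            # +0.3 from FACH_NAME_MAPPING
    assert r.niveau.value == "eA"               # +0.2 from the "_ea" marker
    assert r.typ.value == "erwartungshorizont"  # +0.2 from the "_EWH" suffix
    assert r.aufgaben_nummer == "I"             # +0.1 from the Roman numeral
    assert r.success and r.confidence > 0.99    # Fach and year recognized
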
@@ -1,413 +1,4 @@
# Backward-compat shim -- module moved to abitur/api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("abitur.api")
@@ -1,327 +1,4 @@
|
|||||||
"""
|
# Backward-compat shim -- module moved to abitur/models.py
|
||||||
Abitur Document Store - Enums, Pydantic Models, Data Classes.
|
import importlib as _importlib
|
||||||
|
import sys as _sys
|
||||||
Shared types for abitur_docs_api and abitur_docs_recognition.
|
_sys.modules[__name__] = _importlib.import_module("abitur.models")
|
||||||
"""
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import List, Dict, Any, Optional
|
|
||||||
from enum import Enum
|
|
||||||
from dataclasses import dataclass
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Enums
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
class Bundesland(str, Enum):
    """Federal states (Bundesländer) with a centralized Abitur."""
    NIEDERSACHSEN = "niedersachsen"
    BAYERN = "bayern"
    BADEN_WUERTTEMBERG = "baden_wuerttemberg"
    NORDRHEIN_WESTFALEN = "nordrhein_westfalen"
    HESSEN = "hessen"
    SACHSEN = "sachsen"
    THUERINGEN = "thueringen"
    BERLIN = "berlin"
    HAMBURG = "hamburg"
    SCHLESWIG_HOLSTEIN = "schleswig_holstein"
    BREMEN = "bremen"
    BRANDENBURG = "brandenburg"
    MECKLENBURG_VORPOMMERN = "mecklenburg_vorpommern"
    SACHSEN_ANHALT = "sachsen_anhalt"
    RHEINLAND_PFALZ = "rheinland_pfalz"
    SAARLAND = "saarland"


class Fach(str, Enum):
    """Abitur subjects."""
    DEUTSCH = "deutsch"
    ENGLISCH = "englisch"
    MATHEMATIK = "mathematik"
    BIOLOGIE = "biologie"
    CHEMIE = "chemie"
    PHYSIK = "physik"
    GESCHICHTE = "geschichte"
    ERDKUNDE = "erdkunde"
    POLITIK_WIRTSCHAFT = "politik_wirtschaft"
    FRANZOESISCH = "franzoesisch"
    SPANISCH = "spanisch"
    LATEIN = "latein"
    GRIECHISCH = "griechisch"
    KUNST = "kunst"
    MUSIK = "musik"
    SPORT = "sport"
    INFORMATIK = "informatik"
    EV_RELIGION = "ev_religion"
    KATH_RELIGION = "kath_religion"
    WERTE_NORMEN = "werte_normen"
    BRC = "brc"
    BVW = "bvw"
    ERNAEHRUNG = "ernaehrung"
    MECHATRONIK = "mechatronik"
    GESUNDHEIT_PFLEGE = "gesundheit_pflege"
    PAEDAGOGIK_PSYCHOLOGIE = "paedagogik_psychologie"


class Niveau(str, Enum):
    """Requirement level (Anforderungsniveau)."""
    EA = "eA"
    GA = "gA"


class DokumentTyp(str, Enum):
    """Document type."""
    AUFGABE = "aufgabe"
    ERWARTUNGSHORIZONT = "erwartungshorizont"
    DECKBLATT = "deckblatt"
    MATERIAL = "material"
    HOERVERSTEHEN = "hoerverstehen"
    SPRACHMITTLUNG = "sprachmittlung"
    BEWERTUNGSBOGEN = "bewertungsbogen"


class VerarbeitungsStatus(str, Enum):
    """Document processing status."""
    PENDING = "pending"
    PROCESSING = "processing"
    RECOGNIZED = "recognized"
    CONFIRMED = "confirmed"
    INDEXED = "indexed"
    ERROR = "error"


# ============================================================================
# Subject-name mapping for filenames
# ============================================================================

FACH_NAME_MAPPING = {
    "deutsch": Fach.DEUTSCH,
    "englisch": Fach.ENGLISCH,
    "mathe": Fach.MATHEMATIK,
    "mathematik": Fach.MATHEMATIK,
    "biologie": Fach.BIOLOGIE,
    "bio": Fach.BIOLOGIE,
    "chemie": Fach.CHEMIE,
    "physik": Fach.PHYSIK,
    "geschichte": Fach.GESCHICHTE,
    "erdkunde": Fach.ERDKUNDE,
    "geographie": Fach.ERDKUNDE,
    "politikwirtschaft": Fach.POLITIK_WIRTSCHAFT,
    "politik": Fach.POLITIK_WIRTSCHAFT,
    "franzoesisch": Fach.FRANZOESISCH,
    "franz": Fach.FRANZOESISCH,
    "spanisch": Fach.SPANISCH,
    "latein": Fach.LATEIN,
    "griechisch": Fach.GRIECHISCH,
    "kunst": Fach.KUNST,
    "musik": Fach.MUSIK,
    "sport": Fach.SPORT,
    "informatik": Fach.INFORMATIK,
    "evreligion": Fach.EV_RELIGION,
    "kathreligion": Fach.KATH_RELIGION,
    "wertenormen": Fach.WERTE_NORMEN,
    "brc": Fach.BRC,
    "bvw": Fach.BVW,
    "ernaehrung": Fach.ERNAEHRUNG,
    "mecha": Fach.MECHATRONIK,
    "mechatronik": Fach.MECHATRONIK,
    "technikmecha": Fach.MECHATRONIK,
    "gespfl": Fach.GESUNDHEIT_PFLEGE,
    "paedpsych": Fach.PAEDAGOGIK_PSYCHOLOGIE,
}


# ============================================================================
# Pydantic Models
# ============================================================================

class DokumentCreate(BaseModel):
    """Manual creation of a document."""
    bundesland: Bundesland
    fach: Fach
    jahr: int = Field(ge=2000, le=2100)
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str] = None


class DokumentUpdate(BaseModel):
    """Update for recognized metadata."""
    bundesland: Optional[Bundesland] = None
    fach: Optional[Fach] = None
    jahr: Optional[int] = None
    niveau: Optional[Niveau] = None
    typ: Optional[DokumentTyp] = None
    aufgaben_nummer: Optional[str] = None
    status: Optional[VerarbeitungsStatus] = None


class DokumentResponse(BaseModel):
    """Response for a single document."""
    id: str
    dateiname: str
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime


class ImportResult(BaseModel):
    """Result of a ZIP import."""
    total_files: int
    recognized: int
    errors: int
    documents: List[DokumentResponse]


class RecognitionResult(BaseModel):
    """Result of the document recognition."""
    success: bool
    bundesland: Optional[Bundesland]
    fach: Optional[Fach]
    jahr: Optional[int]
    niveau: Optional[Niveau]
    typ: Optional[DokumentTyp]
    aufgaben_nummer: Optional[str]
    confidence: float
    raw_filename: str
    suggestions: List[Dict[str, Any]]

    @property
    def extracted(self) -> Dict[str, Any]:
        """Backwards-compatible property returning extracted values as dict."""
        result = {}
        if self.bundesland:
            result["bundesland"] = self.bundesland.value
        if self.fach:
            result["fach"] = self.fach.value
        if self.jahr:
            result["jahr"] = self.jahr
        if self.niveau:
            result["niveau"] = self.niveau.value
        if self.typ:
            result["typ"] = self.typ.value
        if self.aufgaben_nummer:
            result["aufgaben_nummer"] = self.aufgaben_nummer
        return result

    @property
    def method(self) -> str:
        """Backwards-compatible property for recognition method."""
        return "filename_pattern"


# ============================================================================
# Internal Data Classes
# ============================================================================

@dataclass
class AbiturDokument:
    """Internal document record."""
    id: str
    dateiname: str
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime


# ============================================================================
# Backwards-compatibility aliases (used by tests)
# ============================================================================
AbiturFach = Fach
Anforderungsniveau = Niveau


class DocumentMetadata(BaseModel):
    """Backwards-compatible metadata model for tests."""
    jahr: Optional[int] = None
    bundesland: Optional[str] = None
    fach: Optional[str] = None
    niveau: Optional[str] = None
    dokument_typ: Optional[str] = None
    aufgaben_nummer: Optional[str] = None


class AbiturDokumentCompat(BaseModel):
    """Backwards-compatible AbiturDokument model for tests."""
    id: str
    filename: str
    file_path: str
    metadata: DocumentMetadata
    status: VerarbeitungsStatus
    recognition_result: Optional[RecognitionResult] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        arbitrary_types_allowed = True


# ============================================================================
# Subject labels (for the frontend enum endpoint)
# ============================================================================

FACH_LABELS = {
    Fach.DEUTSCH: "Deutsch",
    Fach.ENGLISCH: "Englisch",
    Fach.MATHEMATIK: "Mathematik",
    Fach.BIOLOGIE: "Biologie",
    Fach.CHEMIE: "Chemie",
    Fach.PHYSIK: "Physik",
    Fach.GESCHICHTE: "Geschichte",
    Fach.ERDKUNDE: "Erdkunde",
    Fach.POLITIK_WIRTSCHAFT: "Politik-Wirtschaft",
    Fach.FRANZOESISCH: "Französisch",
    Fach.SPANISCH: "Spanisch",
    Fach.LATEIN: "Latein",
    Fach.GRIECHISCH: "Griechisch",
    Fach.KUNST: "Kunst",
    Fach.MUSIK: "Musik",
    Fach.SPORT: "Sport",
    Fach.INFORMATIK: "Informatik",
    Fach.EV_RELIGION: "Ev. Religion",
    Fach.KATH_RELIGION: "Kath. Religion",
    Fach.WERTE_NORMEN: "Werte und Normen",
    Fach.BRC: "BRC (Betriebswirtschaft)",
    Fach.BVW: "BVW (Volkswirtschaft)",
    Fach.ERNAEHRUNG: "Ernährung",
    Fach.MECHATRONIK: "Mechatronik",
    Fach.GESUNDHEIT_PFLEGE: "Gesundheit-Pflege",
    Fach.PAEDAGOGIK_PSYCHOLOGIE: "Pädagogik-Psychologie",
}

DOKUMENT_TYP_LABELS = {
    DokumentTyp.AUFGABE: "Aufgabe",
    DokumentTyp.ERWARTUNGSHORIZONT: "Erwartungshorizont",
    DokumentTyp.DECKBLATT: "Deckblatt",
    DokumentTyp.MATERIAL: "Material",
    DokumentTyp.HOERVERSTEHEN: "Hörverstehen",
    DokumentTyp.SPRACHMITTLUNG: "Sprachmittlung",
    DokumentTyp.BEWERTUNGSBOGEN: "Bewertungsbogen",
}

@@ -1,124 +1,4 @@
# Backward-compat shim -- module moved to abitur/recognition.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("abitur.recognition")

"""
Abitur Document Store - filename recognition and helpers.

Extracts metadata from NiBiS filenames (Niedersachsen).
"""

import re
from typing import Dict, Any
from pathlib import Path

from abitur_docs_models import (
    Bundesland, Fach, Niveau, DokumentTyp, VerarbeitungsStatus,
    RecognitionResult, AbiturDokument, DokumentResponse,
    FACH_NAME_MAPPING,
)


def parse_nibis_filename(filename: str) -> RecognitionResult:
    """
    Extracts metadata from a NiBiS filename.

    Examples:
    - 2025_Deutsch_eA_I.pdf
    - 2025_Deutsch_eA_I_EWH.pdf
    - 2025_Biologie_gA_1.pdf
    - 2025_Englisch_eA_HV.pdf (listening comprehension)
    """
    result = RecognitionResult(
        success=False,
        bundesland=Bundesland.NIEDERSACHSEN,
        fach=None,
        jahr=None,
        niveau=None,
        typ=None,
        aufgaben_nummer=None,
        confidence=0.0,
        raw_filename=filename,
        suggestions=[]
    )

    # Normalize the filename (stem only, lower-cased)
    name = Path(filename).stem.lower()

    # Extract year (4 digits at the start)
    jahr_match = re.match(r'^(\d{4})', name)
    if jahr_match:
        result.jahr = int(jahr_match.group(1))
        result.confidence += 0.2

    # Extract subject
    for fach_key, fach_enum in FACH_NAME_MAPPING.items():
        if fach_key in name.replace("_", "").replace("-", ""):
            result.fach = fach_enum
            result.confidence += 0.3
            break

    # Extract level (eA/gA)
    if "_ea" in name or "_ea_" in name or "ea_" in name:
        result.niveau = Niveau.EA
        result.confidence += 0.2
    elif "_ga" in name or "_ga_" in name or "ga_" in name:
        result.niveau = Niveau.GA
        result.confidence += 0.2

    # Extract document type
    if "_ewh" in name:
        result.typ = DokumentTyp.ERWARTUNGSHORIZONT
        result.confidence += 0.2
    elif "_hv" in name or "hoerverstehen" in name:
        result.typ = DokumentTyp.HOERVERSTEHEN
        result.confidence += 0.15
    elif "_sm" in name or "_me" in name or "sprachmittlung" in name:
        result.typ = DokumentTyp.SPRACHMITTLUNG
        result.confidence += 0.15
    elif "deckblatt" in name:
        result.typ = DokumentTyp.DECKBLATT
        result.confidence += 0.15
    elif "material" in name:
        result.typ = DokumentTyp.MATERIAL
        result.confidence += 0.15
    elif "bewertung" in name:
        result.typ = DokumentTyp.BEWERTUNGSBOGEN
        result.confidence += 0.15
    else:
        result.typ = DokumentTyp.AUFGABE
        result.confidence += 0.1

    # Extract task number (Roman or Arabic)
    aufgabe_match = re.search(r'_([ivx]+|[1-4][abc]?)(?:_|\.pdf|$)', name, re.IGNORECASE)
    if aufgabe_match:
        result.aufgaben_nummer = aufgabe_match.group(1).upper()
        result.confidence += 0.1

    # Success if at least subject and year were recognized
    if result.fach and result.jahr:
        result.success = True

    # Cap confidence at 1.0
    result.confidence = min(result.confidence, 1.0)

    return result

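A quick usage sketch for the parser (the filename is a made-up example; the expected values follow from the scoring rules above):

r = parse_nibis_filename("2025_Deutsch_eA_I_EWH.pdf")
assert r.success and r.jahr == 2025
assert r.fach == Fach.DEUTSCH and r.niveau == Niveau.EA
assert r.typ == DokumentTyp.ERWARTUNGSHORIZONT
assert r.aufgaben_nummer == "I"
assert round(r.confidence, 6) == 1.0  # 0.2 + 0.3 + 0.2 + 0.2 + 0.1, capped at 1.0
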
def to_dokument_response(doc: AbiturDokument) -> DokumentResponse:
    """Converts an internal document to a response model."""
    return DokumentResponse(
        id=doc.id,
        dateiname=doc.dateiname,
        original_dateiname=doc.original_dateiname,
        bundesland=doc.bundesland,
        fach=doc.fach,
        jahr=doc.jahr,
        niveau=doc.niveau,
        typ=doc.typ,
        aufgaben_nummer=doc.aufgaben_nummer,
        status=doc.status,
        confidence=doc.confidence,
        file_path=doc.file_path,
        file_size=doc.file_size,
        indexed=doc.indexed,
        vector_ids=doc.vector_ids,
        created_at=doc.created_at,
        updated_at=doc.updated_at
    )

@@ -1,340 +1,4 @@
# Backward-compat shim -- module moved to letters/certificates_api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("letters.certificates_api")

"""
Certificates API - certificate management (Zeugnisverwaltung) for BreakPilot.

Split into:
- certificates_models.py: Enums, Pydantic models, helper functions
- certificates_api.py (this file): API endpoints and in-memory store
"""

import logging
import uuid
from datetime import datetime
from typing import Optional, Dict, List, Any

from fastapi import APIRouter, HTTPException, Response, Query

# PDF service requires WeasyPrint with system libraries - make optional for CI
try:
    from services.pdf_service import generate_certificate_pdf, SchoolInfo
    _pdf_available = True
except (ImportError, OSError):
    generate_certificate_pdf = None  # type: ignore
    SchoolInfo = None  # type: ignore
    _pdf_available = False

from certificates_models import (
    CertificateType,
    CertificateStatus,
    BehaviorGrade,
    CertificateCreateRequest,
    CertificateUpdateRequest,
    CertificateResponse,
    CertificateListResponse,
    GradeStatistics,
    get_type_label as _get_type_label,
    calculate_average as _calculate_average,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/certificates", tags=["certificates"])


# =============================================================================
# In-Memory Storage (prototype - replace with a database later)
# =============================================================================

_certificates_store: Dict[str, Dict[str, Any]] = {}


def _get_certificate(cert_id: str) -> Dict[str, Any]:
    """Fetches a certificate from the store."""
    if cert_id not in _certificates_store:
        raise HTTPException(status_code=404, detail=f"Zeugnis mit ID {cert_id} nicht gefunden")
    return _certificates_store[cert_id]


def _save_certificate(cert_data: Dict[str, Any]) -> str:
    """Saves a certificate and returns its ID."""
    cert_id = cert_data.get("id") or str(uuid.uuid4())
    cert_data["id"] = cert_id
    cert_data["updated_at"] = datetime.now()
    if "created_at" not in cert_data:
        cert_data["created_at"] = datetime.now()
    _certificates_store[cert_id] = cert_data
    return cert_id

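A round-trip sketch for the two store helpers (illustrative; the payload is a hypothetical minimal record):

cid = _save_certificate({"student_name": "Max Mustermann"})
cert = _get_certificate(cid)
assert cert["id"] == cid and "created_at" in cert   # id and timestamps were filled in
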
# =============================================================================
# API Endpoints
# =============================================================================

@router.post("/", response_model=CertificateResponse)
async def create_certificate(request: CertificateCreateRequest):
    """Creates a new certificate."""
    logger.info(f"Creating new certificate for student: {request.student_name}")

    subjects_list = [s.model_dump() for s in request.subjects]

    cert_data = {
        "student_id": request.student_id,
        "student_name": request.student_name,
        "student_birthdate": request.student_birthdate,
        "student_class": request.student_class,
        "school_year": request.school_year,
        "certificate_type": request.certificate_type,
        "subjects": subjects_list,
        "attendance": request.attendance.model_dump(),
        "remarks": request.remarks,
        "class_teacher": request.class_teacher,
        "principal": request.principal,
        "school_info": request.school_info.model_dump() if request.school_info else None,
        "issue_date": request.issue_date or datetime.now().strftime("%d.%m.%Y"),
        "social_behavior": request.social_behavior,
        "work_behavior": request.work_behavior,
        "status": CertificateStatus.DRAFT,
        "average_grade": _calculate_average(subjects_list),
        "pdf_path": None,
        "dsms_cid": None,
    }

    cert_id = _save_certificate(cert_data)
    cert_data["id"] = cert_id
    logger.info(f"Certificate created with ID: {cert_id}")
    return CertificateResponse(**cert_data)


# IMPORTANT: Static routes must be defined BEFORE dynamic /{cert_id} route
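# (FastAPI matches routes in registration order, so if "/{cert_id}" were
# registered first, GET /certificates/types would be captured with
# cert_id == "types" and answered with a 404 instead of the enum list.)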
@router.get("/types")
async def get_certificate_types():
    """Returns all available certificate types."""
    return {"types": [{"value": t.value, "label": _get_type_label(t)} for t in CertificateType]}


@router.get("/behavior-grades")
async def get_behavior_grades():
    """Returns all available behavior grades."""
    labels = {
        BehaviorGrade.A: "A - Sehr gut", BehaviorGrade.B: "B - Gut",
        BehaviorGrade.C: "C - Befriedigend", BehaviorGrade.D: "D - Verbesserungswuerdig"
    }
    return {"grades": [{"value": g.value, "label": labels[g]} for g in BehaviorGrade]}


@router.get("/{cert_id}", response_model=CertificateResponse)
|
|
||||||
async def get_certificate(cert_id: str):
|
|
||||||
"""Laedt ein gespeichertes Zeugnis."""
|
|
||||||
logger.info(f"Getting certificate: {cert_id}")
|
|
||||||
return CertificateResponse(**_get_certificate(cert_id))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=CertificateListResponse)
|
|
||||||
async def list_certificates(
|
|
||||||
student_id: Optional[str] = Query(None),
|
|
||||||
class_name: Optional[str] = Query(None),
|
|
||||||
school_year: Optional[str] = Query(None),
|
|
||||||
certificate_type: Optional[CertificateType] = Query(None),
|
|
||||||
status: Optional[CertificateStatus] = Query(None),
|
|
||||||
page: int = Query(1, ge=1),
|
|
||||||
page_size: int = Query(20, ge=1, le=100)
|
|
||||||
):
|
|
||||||
"""Listet alle gespeicherten Zeugnisse mit optionalen Filtern."""
|
|
||||||
logger.info("Listing certificates with filters")
|
|
||||||
|
|
||||||
filtered_certs = list(_certificates_store.values())
|
|
||||||
if student_id:
|
|
||||||
filtered_certs = [c for c in filtered_certs if c.get("student_id") == student_id]
|
|
||||||
if class_name:
|
|
||||||
filtered_certs = [c for c in filtered_certs if c.get("student_class") == class_name]
|
|
||||||
if school_year:
|
|
||||||
filtered_certs = [c for c in filtered_certs if c.get("school_year") == school_year]
|
|
||||||
if certificate_type:
|
|
||||||
filtered_certs = [c for c in filtered_certs if c.get("certificate_type") == certificate_type]
|
|
||||||
if status:
|
|
||||||
filtered_certs = [c for c in filtered_certs if c.get("status") == status]
|
|
||||||
|
|
||||||
filtered_certs.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
|
|
||||||
total = len(filtered_certs)
|
|
||||||
start = (page - 1) * page_size
|
|
||||||
paginated_certs = filtered_certs[start:start + page_size]
|
|
||||||
|
|
||||||
return CertificateListResponse(
|
|
||||||
certificates=[CertificateResponse(**c) for c in paginated_certs],
|
|
||||||
total=total, page=page, page_size=page_size
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/{cert_id}", response_model=CertificateResponse)
async def update_certificate(cert_id: str, request: CertificateUpdateRequest):
    """Updates an existing certificate."""
    logger.info(f"Updating certificate: {cert_id}")
    cert_data = _get_certificate(cert_id)

    if cert_data.get("status") in [CertificateStatus.ISSUED, CertificateStatus.ARCHIVED]:
        raise HTTPException(status_code=400, detail="Zeugnis wurde bereits ausgestellt und kann nicht mehr bearbeitet werden")

    update_data = request.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        if value is not None:
            if key == "subjects":
                cert_data[key] = [s if isinstance(s, dict) else s.model_dump() for s in value]
                cert_data["average_grade"] = _calculate_average(cert_data["subjects"])
            elif key == "attendance":
                cert_data[key] = value if isinstance(value, dict) else value.model_dump()
            else:
                cert_data[key] = value

    _save_certificate(cert_data)
    return CertificateResponse(**cert_data)


@router.delete("/{cert_id}")
async def delete_certificate(cert_id: str):
    """Deletes a certificate. Only drafts can be deleted."""
    logger.info(f"Deleting certificate: {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.DRAFT:
        raise HTTPException(status_code=400, detail="Nur Zeugnis-Entwuerfe koennen geloescht werden")
    del _certificates_store[cert_id]
    return {"message": f"Zeugnis {cert_id} wurde geloescht"}


@router.post("/{cert_id}/export-pdf")
|
|
||||||
async def export_certificate_pdf(cert_id: str):
|
|
||||||
"""Exportiert ein Zeugnis als PDF."""
|
|
||||||
logger.info(f"Exporting certificate {cert_id} as PDF")
|
|
||||||
cert_data = _get_certificate(cert_id)
|
|
||||||
|
|
||||||
try:
|
|
||||||
pdf_bytes = generate_certificate_pdf(cert_data)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error generating PDF: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=f"Fehler bei PDF-Generierung: {str(e)}")
|
|
||||||
|
|
||||||
student_name = cert_data.get("student_name", "Zeugnis").replace(" ", "_")
|
|
||||||
school_year = cert_data.get("school_year", "").replace("/", "-")
|
|
||||||
cert_type = cert_data.get("certificate_type", "zeugnis")
|
|
||||||
filename = f"Zeugnis_{student_name}_{cert_type}_{school_year}.pdf"
|
|
||||||
|
|
||||||
from urllib.parse import quote
|
|
||||||
filename_ascii = filename.encode('ascii', 'replace').decode('ascii')
|
|
||||||
filename_encoded = quote(filename, safe='')
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
content=pdf_bytes, media_type="application/pdf",
|
|
||||||
headers={
|
|
||||||
"Content-Disposition": f"attachment; filename=\"{filename_ascii}\"; filename*=UTF-8''{filename_encoded}",
|
|
||||||
"Content-Length": str(len(pdf_bytes))
|
|
||||||
}
|
|
||||||
)
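# Note on the Content-Disposition header above (illustrative): the plain
# "filename" parameter is an ASCII fallback for older clients, while the
# RFC 6266 "filename*" form carries the percent-encoded UTF-8 name, so
# umlauts in student names survive the download.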


@router.post("/{cert_id}/submit-review")
async def submit_for_review(cert_id: str):
    """Submits a certificate for review."""
    logger.info(f"Submitting certificate {cert_id} for review")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.DRAFT:
        raise HTTPException(status_code=400, detail="Nur Entwuerfe koennen zur Pruefung eingereicht werden")
    if not cert_data.get("subjects"):
        raise HTTPException(status_code=400, detail="Keine Fachnoten eingetragen")
    cert_data["status"] = CertificateStatus.REVIEW
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde zur Pruefung eingereicht", "status": CertificateStatus.REVIEW}


@router.post("/{cert_id}/approve")
async def approve_certificate(cert_id: str):
    """Approves a certificate."""
    logger.info(f"Approving certificate {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.REVIEW:
        raise HTTPException(status_code=400, detail="Nur Zeugnisse in Pruefung koennen genehmigt werden")
    cert_data["status"] = CertificateStatus.APPROVED
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde genehmigt", "status": CertificateStatus.APPROVED}


@router.post("/{cert_id}/issue")
async def issue_certificate(cert_id: str):
    """Officially issues a certificate."""
    logger.info(f"Issuing certificate {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.APPROVED:
        raise HTTPException(status_code=400, detail="Nur genehmigte Zeugnisse koennen ausgestellt werden")
    cert_data["status"] = CertificateStatus.ISSUED
    cert_data["issue_date"] = datetime.now().strftime("%d.%m.%Y")
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde ausgestellt", "status": CertificateStatus.ISSUED, "issue_date": cert_data["issue_date"]}

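# The status checks above enforce a linear lifecycle (illustrative summary):
#   DRAFT -> REVIEW -> APPROVED -> ISSUED
# Each transition endpoint rejects certificates that are not in the
# immediately preceding state, and ISSUED/ARCHIVED certificates are
# read-only (see update_certificate above).
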
@router.get("/student/{student_id}", response_model=CertificateListResponse)
|
|
||||||
async def get_certificates_for_student(
|
|
||||||
student_id: str, page: int = Query(1, ge=1), page_size: int = Query(20, ge=1, le=100)
|
|
||||||
):
|
|
||||||
"""Laedt alle Zeugnisse fuer einen bestimmten Schueler."""
|
|
||||||
logger.info(f"Getting certificates for student: {student_id}")
|
|
||||||
filtered_certs = [c for c in _certificates_store.values() if c.get("student_id") == student_id]
|
|
||||||
filtered_certs.sort(key=lambda x: (x.get("school_year", ""), x.get("certificate_type", "")), reverse=True)
|
|
||||||
total = len(filtered_certs)
|
|
||||||
start = (page - 1) * page_size
|
|
||||||
paginated_certs = filtered_certs[start:start + page_size]
|
|
||||||
return CertificateListResponse(
|
|
||||||
certificates=[CertificateResponse(**c) for c in paginated_certs],
|
|
||||||
total=total, page=page, page_size=page_size
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/class/{class_name}/statistics", response_model=GradeStatistics)
|
|
||||||
async def get_class_statistics(
|
|
||||||
class_name: str,
|
|
||||||
school_year: str = Query(..., description="Schuljahr"),
|
|
||||||
certificate_type: CertificateType = Query(CertificateType.HALBJAHR)
|
|
||||||
):
|
|
||||||
"""Berechnet Notenstatistiken fuer eine Klasse."""
|
|
||||||
logger.info(f"Calculating statistics for class {class_name}")
|
|
||||||
|
|
||||||
class_certs = [
|
|
||||||
c for c in _certificates_store.values()
|
|
||||||
if c.get("student_class") == class_name
|
|
||||||
and c.get("school_year") == school_year
|
|
||||||
and c.get("certificate_type") == certificate_type
|
|
||||||
]
|
|
||||||
|
|
||||||
if not class_certs:
|
|
||||||
raise HTTPException(status_code=404, detail=f"Keine Zeugnisse fuer Klasse {class_name} im Schuljahr {school_year} gefunden")
|
|
||||||
|
|
||||||
all_grades: List[float] = []
|
|
||||||
subject_grades: Dict[str, List[float]] = {}
|
|
||||||
grade_counts = {"1": 0, "2": 0, "3": 0, "4": 0, "5": 0, "6": 0}
|
|
||||||
|
|
||||||
for cert in class_certs:
|
|
||||||
avg = cert.get("average_grade")
|
|
||||||
if avg:
|
|
||||||
all_grades.append(avg)
|
|
||||||
rounded = str(round(avg))
|
|
||||||
if rounded in grade_counts:
|
|
||||||
grade_counts[rounded] += 1
|
|
||||||
|
|
||||||
for subject in cert.get("subjects", []):
|
|
||||||
name = subject.get("name")
|
|
||||||
grade_str = subject.get("grade")
|
|
||||||
try:
|
|
||||||
grade = float(grade_str)
|
|
||||||
if name not in subject_grades:
|
|
||||||
subject_grades[name] = []
|
|
||||||
subject_grades[name].append(grade)
|
|
||||||
except (ValueError, TypeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
subject_averages = {
|
|
||||||
name: round(sum(grades) / len(grades), 2)
|
|
||||||
for name, grades in subject_grades.items() if grades
|
|
||||||
}
|
|
||||||
|
|
||||||
return GradeStatistics(
|
|
||||||
class_name=class_name, school_year=school_year,
|
|
||||||
certificate_type=certificate_type, student_count=len(class_certs),
|
|
||||||
average_grade=round(sum(all_grades) / len(all_grades), 2) if all_grades else 0.0,
|
|
||||||
grade_distribution=grade_counts, subject_averages=subject_averages
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,184 +1,4 @@
# Backward-compat shim -- module moved to letters/certificates_models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("letters.certificates_models")

"""
Certificates Models - Pydantic models and enums for certificate management (Zeugnisverwaltung).
"""
from datetime import datetime
from typing import Optional, List, Dict
from enum import Enum

from pydantic import BaseModel, Field


# =============================================================================
# Enums
# =============================================================================

class CertificateType(str, Enum):
    """Certificate types."""
    HALBJAHR = "halbjahr"
    JAHRES = "jahres"
    ABSCHLUSS = "abschluss"
    ABGANG = "abgang"
    UEBERGANG = "uebergang"


class CertificateStatus(str, Enum):
    """Status of a certificate."""
    DRAFT = "draft"
    REVIEW = "review"
    APPROVED = "approved"
    ISSUED = "issued"
    ARCHIVED = "archived"


class GradeType(str, Enum):
    """Grade type."""
    NUMERIC = "numeric"
    POINTS = "points"
    TEXT = "text"


class BehaviorGrade(str, Enum):
    """Behavior/work-habits grades."""
    A = "A"
    B = "B"
    C = "C"
    D = "D"


# =============================================================================
# Pydantic Models
# =============================================================================

class SchoolInfoModel(BaseModel):
    """School information for a certificate."""
    name: str
    address: str
    phone: str
    email: str
    website: Optional[str] = None
    principal: Optional[str] = None
    logo_path: Optional[str] = None


class SubjectGrade(BaseModel):
    """Grade for one subject."""
    name: str = Field(..., description="Fachname")
    grade: str = Field(..., description="Note (1-6 oder A-D)")
    points: Optional[int] = Field(None, description="Punkte (Oberstufe, 0-15)")
    note: Optional[str] = Field(None, description="Bemerkung zum Fach")


class AttendanceInfo(BaseModel):
    """Attendance information."""
    days_absent: int = Field(0, description="Fehlende Tage gesamt")
    days_excused: int = Field(0, description="Entschuldigte Tage")
    days_unexcused: int = Field(0, description="Unentschuldigte Tage")
    hours_absent: Optional[int] = Field(None, description="Fehlstunden gesamt")


class CertificateCreateRequest(BaseModel):
    """Request to create a new certificate."""
    student_id: str = Field(..., description="ID des Schuelers")
    student_name: str = Field(..., description="Name des Schuelers")
    student_birthdate: str = Field(..., description="Geburtsdatum")
    student_class: str = Field(..., description="Klasse")
    school_year: str = Field(..., description="Schuljahr (z.B. '2024/2025')")
    certificate_type: CertificateType = Field(..., description="Art des Zeugnisses")
    subjects: List[SubjectGrade] = Field(..., description="Fachnoten")
    attendance: AttendanceInfo = Field(default_factory=AttendanceInfo)
    remarks: Optional[str] = Field(None, description="Bemerkungen")
    class_teacher: str = Field(..., description="Klassenlehrer/in")
    principal: str = Field(..., description="Schulleiter/in")
    school_info: Optional[SchoolInfoModel] = Field(None)
    issue_date: Optional[str] = Field(None, description="Ausstellungsdatum")
    social_behavior: Optional[BehaviorGrade] = Field(None)
    work_behavior: Optional[BehaviorGrade] = Field(None)


class CertificateUpdateRequest(BaseModel):
    """Request to update a certificate."""
    subjects: Optional[List[SubjectGrade]] = None
    attendance: Optional[AttendanceInfo] = None
    remarks: Optional[str] = None
    class_teacher: Optional[str] = None
    principal: Optional[str] = None
    social_behavior: Optional[BehaviorGrade] = None
    work_behavior: Optional[BehaviorGrade] = None
    status: Optional[CertificateStatus] = None


class CertificateResponse(BaseModel):
    """Response with certificate data."""
    id: str
    student_id: str
    student_name: str
    student_birthdate: str
    student_class: str
    school_year: str
    certificate_type: CertificateType
    subjects: List[SubjectGrade]
    attendance: AttendanceInfo
    remarks: Optional[str]
    class_teacher: str
    principal: str
    school_info: Optional[SchoolInfoModel]
    issue_date: Optional[str]
    social_behavior: Optional[BehaviorGrade]
    work_behavior: Optional[BehaviorGrade]
    status: CertificateStatus
    average_grade: Optional[float]
    pdf_path: Optional[str]
    dsms_cid: Optional[str]
    created_at: datetime
    updated_at: datetime


class CertificateListResponse(BaseModel):
    """Response with a list of certificates."""
    certificates: List[CertificateResponse]
    total: int
    page: int
    page_size: int


class GradeStatistics(BaseModel):
    """Grade statistics for a class."""
    class_name: str
    school_year: str
    certificate_type: CertificateType
    student_count: int
    average_grade: float
    grade_distribution: Dict[str, int]
    subject_averages: Dict[str, float]


# =============================================================================
# Helper Functions
# =============================================================================

def get_type_label(cert_type: CertificateType) -> str:
    """Returns human-readable labels for certificate types."""
    labels = {
        CertificateType.HALBJAHR: "Halbjahreszeugnis",
        CertificateType.JAHRES: "Jahreszeugnis",
        CertificateType.ABSCHLUSS: "Abschlusszeugnis",
        CertificateType.ABGANG: "Abgangszeugnis",
        CertificateType.UEBERGANG: "Uebergangszeugnis",
    }
    return labels.get(cert_type, cert_type.value)


def calculate_average(subjects: List[Dict]) -> Optional[float]:
    """Calculates the grade average over all numeric subject grades."""
    numeric_grades = []
    for subject in subjects:
        grade = subject.get("grade", "")
        try:
            numeric = float(grade)
            if 1 <= numeric <= 6:
                numeric_grades.append(numeric)
        except (ValueError, TypeError):
            pass
    if numeric_grades:
        return round(sum(numeric_grades) / len(numeric_grades), 2)
    return None

@@ -0,0 +1 @@
# correction — class-test correction (Klassenarbeits-Korrektur): grading, feedback, OCR.
@@ -0,0 +1,23 @@
"""
Correction API - REST API for class-test correction (Klassenarbeits-Korrektur).

Barrel re-export: router and all public symbols.
"""

from .endpoints import router  # noqa: F401
from .models import (  # noqa: F401
    CorrectionStatus,
    AnswerEvaluation,
    CorrectionCreate,
    CorrectionUpdate,
    Correction,
    CorrectionResponse,
    OCRResponse,
    AnalysisResponse,
)
from .helpers import (  # noqa: F401
    corrections_store,
    calculate_grade,
    generate_ai_feedback,
    process_ocr,
)
@@ -0,0 +1,474 @@
"""
Correction API - REST endpoint handlers.

Workflow:
1. Upload: upload the scanned class test
2. OCR: extract text from handwriting
3. Analysis: analyze and score the answers
4. Feedback: generate AI-based feedback
5. Export: export the corrected test as a PDF
"""

import logging
import uuid
import os
from datetime import datetime
from typing import Dict, Any, Optional
from pathlib import Path

from fastapi import APIRouter, HTTPException, UploadFile, File, BackgroundTasks

from .models import (
    CorrectionStatus,
    AnswerEvaluation,
    CorrectionCreate,
    CorrectionUpdate,
    Correction,
    CorrectionResponse,
    AnalysisResponse,
    UPLOAD_DIR,
)
from .helpers import (
    corrections_store,
    calculate_grade,
    generate_ai_feedback,
    process_ocr,
    PDFService,
    CorrectionData,
    StudentInfo,
)

logger = logging.getLogger(__name__)

router = APIRouter(
    prefix="/corrections",
    tags=["corrections"],
)


# ============================================================================
# API Endpoints
# ============================================================================

@router.post("/", response_model=CorrectionResponse)
|
||||||
|
async def create_correction(data: CorrectionCreate):
|
||||||
|
"""
|
||||||
|
Erstellt eine neue Korrektur.
|
||||||
|
|
||||||
|
Noch ohne Datei - diese wird separat hochgeladen.
|
||||||
|
"""
|
||||||
|
correction_id = str(uuid.uuid4())
|
||||||
|
now = datetime.utcnow()
|
||||||
|
|
||||||
|
correction = Correction(
|
||||||
|
id=correction_id,
|
||||||
|
student_id=data.student_id,
|
||||||
|
student_name=data.student_name,
|
||||||
|
class_name=data.class_name,
|
||||||
|
exam_title=data.exam_title,
|
||||||
|
subject=data.subject,
|
||||||
|
max_points=data.max_points,
|
||||||
|
status=CorrectionStatus.UPLOADED,
|
||||||
|
created_at=now,
|
||||||
|
updated_at=now
|
||||||
|
)
|
||||||
|
|
||||||
|
corrections_store[correction_id] = correction
|
||||||
|
logger.info(f"Created correction {correction_id} for {data.student_name}")
|
||||||
|
|
||||||
|
return CorrectionResponse(success=True, correction=correction)
|
||||||
|
|
||||||
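A client-side sketch of the first two workflow steps using FastAPI's TestClient (illustrative; the app wiring, payload values, and scan file are assumptions):

from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()
app.include_router(router)
client = TestClient(app)

resp = client.post("/corrections/", json={
    "student_id": "s-001", "student_name": "Max Mustermann",
    "class_name": "10a", "exam_title": "Klassenarbeit 2",
    "subject": "Deutsch", "max_points": 30,
})
correction_id = resp.json()["correction"]["id"]

with open("scan.pdf", "rb") as f:  # hypothetical scanned exam
    client.post(f"/corrections/{correction_id}/upload",
                files={"file": ("scan.pdf", f, "application/pdf")})
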

@router.post("/{correction_id}/upload", response_model=CorrectionResponse)
async def upload_exam(
    correction_id: str,
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...)
):
    """
    Uploads a scanned class test and starts OCR.

    Supported formats: PDF, PNG, JPG, JPEG
    """
    correction = corrections_store.get(correction_id)
    if not correction:
        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")

    # Validate the file format
    allowed_extensions = {".pdf", ".png", ".jpg", ".jpeg"}
    file_ext = Path(file.filename).suffix.lower() if file.filename else ""

    if file_ext not in allowed_extensions:
        raise HTTPException(
            status_code=400,
            detail=f"Ungueltiges Dateiformat. Erlaubt: {', '.join(allowed_extensions)}"
        )

    # Store the file
    file_path = UPLOAD_DIR / f"{correction_id}{file_ext}"

    try:
        content = await file.read()
        with open(file_path, "wb") as f:
            f.write(content)

        correction.file_path = str(file_path)
        correction.updated_at = datetime.utcnow()
        corrections_store[correction_id] = correction

        # Start OCR in the background
        background_tasks.add_task(process_ocr, correction_id, str(file_path))

        logger.info(f"Uploaded file for correction {correction_id}: {file.filename}")

        return CorrectionResponse(success=True, correction=correction)

    except Exception as e:
        logger.error(f"Upload error: {e}")
        return CorrectionResponse(success=False, error=str(e))


@router.get("/{correction_id}", response_model=CorrectionResponse)
|
||||||
|
async def get_correction(correction_id: str):
|
||||||
|
"""Ruft eine Korrektur ab."""
|
||||||
|
correction = corrections_store.get(correction_id)
|
||||||
|
if not correction:
|
||||||
|
raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
|
||||||
|
|
||||||
|
return CorrectionResponse(success=True, correction=correction)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/", response_model=Dict[str, Any])
|
||||||
|
async def list_corrections(
|
||||||
|
class_name: Optional[str] = None,
|
||||||
|
status: Optional[CorrectionStatus] = None,
|
||||||
|
limit: int = 50
|
||||||
|
):
|
||||||
|
"""Listet Korrekturen auf, optional gefiltert."""
|
||||||
|
corrections = list(corrections_store.values())
|
||||||
|
|
||||||
|
if class_name:
|
||||||
|
corrections = [c for c in corrections if c.class_name == class_name]
|
||||||
|
|
||||||
|
if status:
|
||||||
|
corrections = [c for c in corrections if c.status == status]
|
||||||
|
|
||||||
|
# Sortiere nach Erstellungsdatum (neueste zuerst)
|
||||||
|
corrections.sort(key=lambda x: x.created_at, reverse=True)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total": len(corrections),
|
||||||
|
"corrections": [c.dict() for c in corrections[:limit]]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{correction_id}/analyze", response_model=AnalysisResponse)
|
||||||
|
async def analyze_correction(
|
||||||
|
correction_id: str,
|
||||||
|
expected_answers: Optional[Dict[str, str]] = None
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Analysiert die extrahierten Antworten.
|
||||||
|
|
||||||
|
Optional mit Musterloesung fuer automatische Bewertung.
|
||||||
|
"""
|
||||||
|
correction = corrections_store.get(correction_id)
|
||||||
|
if not correction:
|
||||||
|
raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
|
||||||
|
|
||||||
|
if correction.status not in [CorrectionStatus.OCR_COMPLETE, CorrectionStatus.ANALYZED]:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Korrektur im falschen Status: {correction.status}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not correction.extracted_text:
|
||||||
|
raise HTTPException(status_code=400, detail="Kein extrahierter Text vorhanden")
|
||||||
|
|
||||||
|
try:
|
||||||
|
correction.status = CorrectionStatus.ANALYZING
|
||||||
|
corrections_store[correction_id] = correction
|
||||||
|
|
||||||
|
# Einfache Analyse ohne LLM
|
||||||
|
# Teile Text in Abschnitte (simuliert Aufgabenerkennung)
|
||||||
|
text_parts = correction.extracted_text.split('\n\n')
|
||||||
|
evaluations = []
|
||||||
|
|
||||||
|
for i, part in enumerate(text_parts[:10], start=1): # Max 10 Aufgaben
|
||||||
|
if len(part.strip()) < 5:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Simulierte Bewertung
|
||||||
|
# In Produktion wuerde hier LLM-basierte Analyse stattfinden
|
||||||
|
expected = expected_answers.get(str(i), "") if expected_answers else ""
|
||||||
|
|
||||||
|
# Einfacher Textvergleich (in Produktion: semantischer Vergleich)
|
||||||
|
is_correct = bool(expected and expected.lower() in part.lower())
|
||||||
|
points = correction.max_points / len(text_parts) if text_parts else 0
|
||||||
|
|
||||||
|
evaluation = AnswerEvaluation(
|
||||||
|
question_number=i,
|
||||||
|
extracted_text=part[:200], # Kuerzen fuer Response
|
||||||
|
points_possible=points,
|
||||||
|
points_awarded=points if is_correct else points * 0.5, # Teilpunkte
|
||||||
|
feedback=f"Antwort zu Aufgabe {i}" + (" korrekt." if is_correct else " mit Verbesserungsbedarf."),
|
||||||
|
is_correct=is_correct,
|
||||||
|
confidence=0.7 # Simulierte Confidence
|
||||||
|
)
|
||||||
|
evaluations.append(evaluation)
|
||||||
|
|
||||||
|
# Berechne Gesamtergebnis
|
||||||
|
total_points = sum(e.points_awarded for e in evaluations)
|
||||||
|
percentage = (total_points / correction.max_points * 100) if correction.max_points > 0 else 0
|
||||||
|
suggested_grade = calculate_grade(percentage)
|
||||||
|
|
||||||
|
# Generiere Feedback
|
||||||
|
ai_feedback = generate_ai_feedback(
|
||||||
|
evaluations, total_points, correction.max_points, correction.subject
|
||||||
|
)
|
||||||
|
|
||||||
|
# Aktualisiere Korrektur
|
||||||
|
correction.evaluations = evaluations
|
||||||
|
correction.total_points = total_points
|
||||||
|
correction.percentage = percentage
|
||||||
|
correction.grade = suggested_grade
|
||||||
|
correction.ai_feedback = ai_feedback
|
||||||
|
correction.status = CorrectionStatus.ANALYZED
|
||||||
|
correction.updated_at = datetime.utcnow()
|
||||||
|
corrections_store[correction_id] = correction
|
||||||
|
|
||||||
|
logger.info(f"Analysis complete for {correction_id}: {total_points}/{correction.max_points}")
|
||||||
|
|
||||||
|
return AnalysisResponse(
|
||||||
|
success=True,
|
||||||
|
evaluations=evaluations,
|
||||||
|
total_points=total_points,
|
||||||
|
percentage=percentage,
|
||||||
|
suggested_grade=suggested_grade,
|
||||||
|
ai_feedback=ai_feedback
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Analysis error: {e}")
|
||||||
|
correction.status = CorrectionStatus.ERROR
|
||||||
|
corrections_store[correction_id] = correction
|
||||||
|
return AnalysisResponse(success=False, error=str(e))
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{correction_id}", response_model=CorrectionResponse)
async def update_correction(correction_id: str, data: CorrectionUpdate):
    """
    Updates a correction.

    Allows manual adjustments by the teacher.
    """
    correction = corrections_store.get(correction_id)
    if not correction:
        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")

    if data.evaluations is not None:
        correction.evaluations = data.evaluations
        correction.total_points = sum(e.points_awarded for e in data.evaluations)
        correction.percentage = (
            correction.total_points / correction.max_points * 100
        ) if correction.max_points > 0 else 0

    if data.total_points is not None:
        correction.total_points = data.total_points
        correction.percentage = (
            data.total_points / correction.max_points * 100
        ) if correction.max_points > 0 else 0

    if data.grade is not None:
        correction.grade = data.grade

    if data.teacher_notes is not None:
        correction.teacher_notes = data.teacher_notes

    if data.status is not None:
        correction.status = data.status

    correction.updated_at = datetime.utcnow()
    corrections_store[correction_id] = correction

    return CorrectionResponse(success=True, correction=correction)


@router.post("/{correction_id}/complete", response_model=CorrectionResponse)
async def complete_correction(correction_id: str):
    """Marks a correction as completed."""
    correction = corrections_store.get(correction_id)
    if not correction:
        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")

    correction.status = CorrectionStatus.COMPLETED
    correction.updated_at = datetime.utcnow()
    corrections_store[correction_id] = correction

    logger.info(f"Correction {correction_id} completed: {correction.grade}")

    return CorrectionResponse(success=True, correction=correction)


@router.get("/{correction_id}/export-pdf")
|
||||||
|
async def export_correction_pdf(correction_id: str):
|
||||||
|
"""
|
||||||
|
Exportiert korrigierte Arbeit als PDF.
|
||||||
|
|
||||||
|
Enthaelt:
|
||||||
|
- Originalscan
|
||||||
|
- Bewertungen
|
||||||
|
- Feedback
|
||||||
|
- Gesamtergebnis
|
||||||
|
"""
|
||||||
|
correction = corrections_store.get(correction_id)
|
||||||
|
if not correction:
|
||||||
|
raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
|
||||||
|
|
||||||
|
try:
|
||||||
|
pdf_service = PDFService()
|
||||||
|
|
||||||
|
# Erstelle CorrectionData
|
||||||
|
correction_data = CorrectionData(
|
||||||
|
student=StudentInfo(
|
||||||
|
student_id=correction.student_id,
|
||||||
|
name=correction.student_name,
|
||||||
|
class_name=correction.class_name
|
||||||
|
),
|
||||||
|
exam_title=correction.exam_title,
|
||||||
|
subject=correction.subject,
|
||||||
|
date=correction.created_at.strftime("%d.%m.%Y"),
|
||||||
|
max_points=correction.max_points,
|
||||||
|
achieved_points=correction.total_points,
|
||||||
|
grade=correction.grade or "",
|
||||||
|
percentage=correction.percentage,
|
||||||
|
corrections=[
|
||||||
|
{
|
||||||
|
"question": f"Aufgabe {e.question_number}",
|
||||||
|
"answer": e.extracted_text,
|
||||||
|
"points": f"{e.points_awarded}/{e.points_possible}",
|
||||||
|
"feedback": e.feedback
|
||||||
|
}
|
||||||
|
for e in correction.evaluations
|
||||||
|
],
|
||||||
|
teacher_notes=correction.teacher_notes or "",
|
||||||
|
ai_feedback=correction.ai_feedback or ""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Generiere PDF
|
||||||
|
pdf_bytes = pdf_service.generate_correction_pdf(correction_data)
|
||||||
|
|
||||||
|
from fastapi.responses import Response
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
content=pdf_bytes,
|
||||||
|
media_type="application/pdf",
|
||||||
|
headers={
|
||||||
|
"Content-Disposition": f'attachment; filename="korrektur_{correction.student_name}_{correction.exam_title}.pdf"'
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"PDF export error: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"PDF-Export fehlgeschlagen: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{correction_id}")
|
||||||
|
async def delete_correction(correction_id: str):
|
||||||
|
"""Loescht eine Korrektur."""
|
||||||
|
if correction_id not in corrections_store:
|
||||||
|
raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
|
||||||
|
|
||||||
|
correction = corrections_store[correction_id]
|
||||||
|
|
||||||
|
# Loesche auch die hochgeladene Datei
|
||||||
|
if correction.file_path and os.path.exists(correction.file_path):
|
||||||
|
try:
|
||||||
|
os.remove(correction.file_path)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not delete file {correction.file_path}: {e}")
|
||||||
|
|
||||||
|
del corrections_store[correction_id]
|
||||||
|
logger.info(f"Deleted correction {correction_id}")
|
||||||
|
|
||||||
|
return {"status": "deleted", "id": correction_id}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/class/{class_name}/summary")
|
||||||
|
async def get_class_summary(class_name: str):
|
||||||
|
"""
|
||||||
|
Gibt Zusammenfassung fuer eine Klasse zurueck.
|
||||||
|
|
||||||
|
Enthaelt Statistiken ueber alle Korrekturen der Klasse.
|
||||||
|
"""
|
||||||
|
class_corrections = [
|
||||||
|
c for c in corrections_store.values()
|
||||||
|
if c.class_name == class_name and c.status == CorrectionStatus.COMPLETED
|
||||||
|
]
|
||||||
|
|
||||||
|
if not class_corrections:
|
||||||
|
return {
|
||||||
|
"class_name": class_name,
|
||||||
|
"total_students": 0,
|
||||||
|
"average_percentage": 0,
|
||||||
|
"grade_distribution": {},
|
||||||
|
"corrections": []
|
||||||
|
}
|
||||||
|
|
||||||
|
# Berechne Statistiken
|
||||||
|
percentages = [c.percentage for c in class_corrections]
|
||||||
|
average_percentage = sum(percentages) / len(percentages) if percentages else 0
|
||||||
|
|
||||||
|
# Notenverteilung
|
||||||
|
grade_distribution = {}
|
||||||
|
for c in class_corrections:
|
||||||
|
grade = c.grade or "?"
|
||||||
|
grade_distribution[grade] = grade_distribution.get(grade, 0) + 1
|
||||||
|
|
||||||
|
return {
|
||||||
|
"class_name": class_name,
|
||||||
|
"total_students": len(class_corrections),
|
||||||
|
"average_percentage": round(average_percentage, 1),
|
||||||
|
"average_points": round(
|
||||||
|
sum(c.total_points for c in class_corrections) / len(class_corrections), 1
|
||||||
|
),
|
||||||
|
"grade_distribution": grade_distribution,
|
||||||
|
"corrections": [
|
||||||
|
{
|
||||||
|
"id": c.id,
|
||||||
|
"student_name": c.student_name,
|
||||||
|
"total_points": c.total_points,
|
||||||
|
"percentage": c.percentage,
|
||||||
|
"grade": c.grade
|
||||||
|
}
|
||||||
|
for c in sorted(class_corrections, key=lambda x: x.student_name)
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{correction_id}/ocr/retry", response_model=CorrectionResponse)
|
||||||
|
async def retry_ocr(correction_id: str, background_tasks: BackgroundTasks):
|
||||||
|
"""
|
||||||
|
Wiederholt OCR-Verarbeitung.
|
||||||
|
|
||||||
|
Nuetzlich wenn erste Verarbeitung fehlgeschlagen ist.
|
||||||
|
"""
|
||||||
|
correction = corrections_store.get(correction_id)
|
||||||
|
if not correction:
|
||||||
|
raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
|
||||||
|
|
||||||
|
if not correction.file_path:
|
||||||
|
raise HTTPException(status_code=400, detail="Keine Datei vorhanden")
|
||||||
|
|
||||||
|
if not os.path.exists(correction.file_path):
|
||||||
|
raise HTTPException(status_code=400, detail="Datei nicht mehr vorhanden")
|
||||||
|
|
||||||
|
# Starte OCR erneut
|
||||||
|
correction.status = CorrectionStatus.UPLOADED
|
||||||
|
correction.extracted_text = None
|
||||||
|
correction.updated_at = datetime.utcnow()
|
||||||
|
corrections_store[correction_id] = correction
|
||||||
|
|
||||||
|
background_tasks.add_task(process_ocr, correction_id, correction.file_path)
|
||||||
|
|
||||||
|
return CorrectionResponse(success=True, correction=correction)
|
||||||
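As a quick usage sketch of the retry flow above (FastAPI TestClient; the app wiring and the import path are assumptions, not part of this commit):

    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    from correction.endpoints import router  # assumed path after the move

    app = FastAPI()
    app.include_router(router)
    client = TestClient(app)

    # Create a correction, then retry OCR before any file was uploaded:
    created = client.post("/corrections/", json={
        "student_id": "s1", "student_name": "Max", "class_name": "10a",
        "exam_title": "Klausur 1", "subject": "Mathe",
    }).json()["correction"]
    resp = client.post(f"/corrections/{created['id']}/ocr/retry")
    assert resp.status_code == 400  # "Keine Datei vorhanden"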
@@ -0,0 +1,134 @@
"""
Correction API - Helper functions for grading, feedback, and OCR processing.
"""

import logging
from typing import List, Dict

from .models import AnswerEvaluation, CorrectionStatus, Correction

logger = logging.getLogger(__name__)

# FileProcessor requires OpenCV with libGL - make optional for CI
try:
    from services.file_processor import FileProcessor, ProcessingResult
    _ocr_available = True
except (ImportError, OSError):
    FileProcessor = None  # type: ignore
    ProcessingResult = None  # type: ignore
    _ocr_available = False

# PDF service requires WeasyPrint with system libraries - make optional for CI
try:
    from services.pdf_service import PDFService, CorrectionData, StudentInfo
    _pdf_available = True
except (ImportError, OSError):
    PDFService = None  # type: ignore
    CorrectionData = None  # type: ignore
    StudentInfo = None  # type: ignore
    _pdf_available = False


# ============================================================================
# In-Memory Storage (replace with a DB later)
# ============================================================================

corrections_store: Dict[str, Correction] = {}


# ============================================================================
# Helper Functions
# ============================================================================

def calculate_grade(percentage: float) -> str:
    """Computes the grade from a percentage (German system)."""
    if percentage >= 92:
        return "1"
    elif percentage >= 81:
        return "2"
    elif percentage >= 67:
        return "3"
    elif percentage >= 50:
        return "4"
    elif percentage >= 30:
        return "5"
    else:
        return "6"


def generate_ai_feedback(
    evaluations: List[AnswerEvaluation],
    total_points: float,
    max_points: float,
    subject: str
) -> str:
    """Generates AI feedback based on the evaluation."""
    # Without an LLM: simple template-based feedback
    percentage = (total_points / max_points * 100) if max_points > 0 else 0
    correct_count = sum(1 for e in evaluations if e.is_correct)
    total_count = len(evaluations)

    if percentage >= 90:
        intro = "Hervorragende Leistung!"
    elif percentage >= 75:
        intro = "Gute Arbeit!"
    elif percentage >= 60:
        intro = "Insgesamt eine solide Leistung."
    elif percentage >= 50:
        intro = "Die Arbeit zeigt Grundkenntnisse, aber es gibt Verbesserungsbedarf."
    else:
        intro = "Es sind deutliche Wissensluecken erkennbar."

    # Find areas for improvement
    weak_areas = [e for e in evaluations if not e.is_correct]
    strengths = [e for e in evaluations if e.is_correct and e.confidence > 0.8]

    feedback_parts = [intro]

    if strengths:
        feedback_parts.append(
            f"Besonders gut geloest: Aufgabe(n) {', '.join(str(s.question_number) for s in strengths[:3])}."
        )

    if weak_areas:
        feedback_parts.append(
            f"Uebungsbedarf bei: Aufgabe(n) {', '.join(str(w.question_number) for w in weak_areas[:3])}."
        )

    feedback_parts.append(
        f"Ergebnis: {correct_count} von {total_count} Aufgaben korrekt ({percentage:.1f}%)."
    )

    return " ".join(feedback_parts)


async def process_ocr(correction_id: str, file_path: str):
    """Background task for OCR processing."""
    from datetime import datetime

    correction = corrections_store.get(correction_id)
    if not correction:
        return

    try:
        correction.status = CorrectionStatus.PROCESSING
        corrections_store[correction_id] = correction

        # Run OCR
        processor = FileProcessor()
        result = processor.process_file(file_path)

        if result.success and result.text:
            correction.extracted_text = result.text
            correction.status = CorrectionStatus.OCR_COMPLETE
        else:
            correction.status = CorrectionStatus.ERROR

        correction.updated_at = datetime.utcnow()
        corrections_store[correction_id] = correction

    except Exception as e:
        logger.error(f"OCR error for {correction_id}: {e}")
        correction.status = CorrectionStatus.ERROR
        correction.updated_at = datetime.utcnow()
        corrections_store[correction_id] = correction
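The grade helper above is a plain threshold ladder; a minimal sketch of its boundary behavior (import path assumed):

    from correction.helpers import calculate_grade

    assert calculate_grade(92.0) == "1"   # >= 92
    assert calculate_grade(91.9) == "2"   # 81 <= p < 92
    assert calculate_grade(50.0) == "4"   # pass threshold
    assert calculate_grade(29.9) == "6"   # everything below 30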
@@ -0,0 +1,111 @@
"""
Correction API - Pydantic models and enums.
"""

from datetime import datetime
from typing import List, Dict, Any, Optional
from enum import Enum
from pathlib import Path

from pydantic import BaseModel, Field


# Upload directory
UPLOAD_DIR = Path("/tmp/corrections")
UPLOAD_DIR.mkdir(parents=True, exist_ok=True)


# ============================================================================
# Enums and Models
# ============================================================================

class CorrectionStatus(str, Enum):
    """Status of a correction."""
    UPLOADED = "uploaded"          # file uploaded
    PROCESSING = "processing"      # OCR running
    OCR_COMPLETE = "ocr_complete"  # OCR finished
    ANALYZING = "analyzing"        # analysis running
    ANALYZED = "analyzed"          # analysis finished
    REVIEWING = "reviewing"        # teacher reviewing
    COMPLETED = "completed"        # correction finished
    ERROR = "error"                # an error occurred


class AnswerEvaluation(BaseModel):
    """Evaluation of a single answer."""
    question_number: int
    extracted_text: str
    points_possible: float
    points_awarded: float
    feedback: str
    is_correct: bool
    confidence: float  # 0-1, how confident the OCR/analysis is


class CorrectionCreate(BaseModel):
    """Request for creating a new correction."""
    student_id: str
    student_name: str
    class_name: str
    exam_title: str
    subject: str
    max_points: float = Field(default=100.0, ge=0)
    expected_answers: Optional[Dict[str, str]] = None  # model solution


class CorrectionUpdate(BaseModel):
    """Request for updating a correction."""
    evaluations: Optional[List[AnswerEvaluation]] = None
    total_points: Optional[float] = None
    grade: Optional[str] = None
    teacher_notes: Optional[str] = None
    status: Optional[CorrectionStatus] = None


class Correction(BaseModel):
    """A correction."""
    id: str
    student_id: str
    student_name: str
    class_name: str
    exam_title: str
    subject: str
    max_points: float
    total_points: float = 0.0
    percentage: float = 0.0
    grade: Optional[str] = None
    status: CorrectionStatus
    file_path: Optional[str] = None
    extracted_text: Optional[str] = None
    evaluations: List[AnswerEvaluation] = []
    teacher_notes: Optional[str] = None
    ai_feedback: Optional[str] = None
    created_at: datetime
    updated_at: datetime


class CorrectionResponse(BaseModel):
    """Response for a correction."""
    success: bool
    correction: Optional[Correction] = None
    error: Optional[str] = None


class OCRResponse(BaseModel):
    """Response for an OCR result."""
    success: bool
    extracted_text: Optional[str] = None
    regions: List[Dict[str, Any]] = []
    confidence: float = 0.0
    error: Optional[str] = None


class AnalysisResponse(BaseModel):
    """Response for an analysis result."""
    success: bool
    evaluations: List[AnswerEvaluation] = []
    total_points: float = 0.0
    percentage: float = 0.0
    suggested_grade: Optional[str] = None
    ai_feedback: Optional[str] = None
    error: Optional[str] = None
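A minimal sketch of constructing the central model with its defaults (values illustrative, import path assumed):

    from datetime import datetime
    from correction.models import Correction, CorrectionStatus

    now = datetime.utcnow()
    c = Correction(
        id="demo", student_id="s1", student_name="Max Mustermann",
        class_name="10a", exam_title="Klausur 1", subject="Mathe",
        max_points=100.0, status=CorrectionStatus.UPLOADED,
        created_at=now, updated_at=now,
    )
    assert c.total_points == 0.0 and c.evaluations == []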
@@ -1,23 +1,4 @@
-"""
-Correction API - REST API for class-test correction.
-
-Barrel re-export: router and all public symbols.
-"""
-
-from correction_endpoints import router  # noqa: F401
-from correction_models import (  # noqa: F401
-    CorrectionStatus,
-    AnswerEvaluation,
-    CorrectionCreate,
-    CorrectionUpdate,
-    Correction,
-    CorrectionResponse,
-    OCRResponse,
-    AnalysisResponse,
-)
-from correction_helpers import (  # noqa: F401
-    corrections_store,
-    calculate_grade,
-    generate_ai_feedback,
-    process_ocr,
-)
+# Backward-compat shim -- module moved to correction/api.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("correction.api")
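The four-line shim relies on the standard sys.modules replacement idiom: importing the old name executes the shim, which swaps itself for the new module, so old import sites keep working. A sketch of the effect (assuming both modules are on the path):

    import sys
    import correction_api                 # runs the shim, which replaces itself
    assert correction_api is sys.modules["correction.api"]
    from correction_api import router     # now resolves against correction.api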
@@ -1,474 +1,4 @@
-"""
-Correction API - REST endpoint handlers.
-
-Workflow:
-1. Upload: upload the scanned class test
-2. OCR: extract text from handwriting
-3. Analysis: analyze and grade the answers
-4. Feedback: create AI-generated feedback
-5. Export: export the corrected test as a PDF
-"""
-
-import logging
-import uuid
-import os
-from datetime import datetime
-from typing import Dict, Any, Optional
-from pathlib import Path
-
-from fastapi import APIRouter, HTTPException, UploadFile, File, BackgroundTasks
-
-from correction_models import (
-    CorrectionStatus,
-    AnswerEvaluation,
-    CorrectionCreate,
-    CorrectionUpdate,
-    Correction,
-    CorrectionResponse,
-    AnalysisResponse,
-    UPLOAD_DIR,
-)
-from correction_helpers import (
-    corrections_store,
-    calculate_grade,
-    generate_ai_feedback,
-    process_ocr,
-    PDFService,
-    CorrectionData,
-    StudentInfo,
-)
-
-logger = logging.getLogger(__name__)
-
-router = APIRouter(
-    prefix="/corrections",
-    tags=["corrections"],
-)
-
-
-# ============================================================================
-# API Endpoints
-# ============================================================================
-
-@router.post("/", response_model=CorrectionResponse)
-async def create_correction(data: CorrectionCreate):
-    """
-    Creates a new correction.
-
-    No file yet - it is uploaded separately.
-    """
-    correction_id = str(uuid.uuid4())
-    now = datetime.utcnow()
-
-    correction = Correction(
-        id=correction_id,
-        student_id=data.student_id,
-        student_name=data.student_name,
-        class_name=data.class_name,
-        exam_title=data.exam_title,
-        subject=data.subject,
-        max_points=data.max_points,
-        status=CorrectionStatus.UPLOADED,
-        created_at=now,
-        updated_at=now
-    )
-
-    corrections_store[correction_id] = correction
-    logger.info(f"Created correction {correction_id} for {data.student_name}")
-
-    return CorrectionResponse(success=True, correction=correction)
-
-
-@router.post("/{correction_id}/upload", response_model=CorrectionResponse)
-async def upload_exam(
-    correction_id: str,
-    background_tasks: BackgroundTasks,
-    file: UploadFile = File(...)
-):
-    """
-    Uploads the scanned class test and starts OCR.
-
-    Supported formats: PDF, PNG, JPG, JPEG
-    """
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    # Validate the file format
-    allowed_extensions = {".pdf", ".png", ".jpg", ".jpeg"}
-    file_ext = Path(file.filename).suffix.lower() if file.filename else ""
-
-    if file_ext not in allowed_extensions:
-        raise HTTPException(
-            status_code=400,
-            detail=f"Ungueltiges Dateiformat. Erlaubt: {', '.join(allowed_extensions)}"
-        )
-
-    # Save the file
-    file_path = UPLOAD_DIR / f"{correction_id}{file_ext}"
-
-    try:
-        content = await file.read()
-        with open(file_path, "wb") as f:
-            f.write(content)
-
-        correction.file_path = str(file_path)
-        correction.updated_at = datetime.utcnow()
-        corrections_store[correction_id] = correction
-
-        # Start OCR in the background
-        background_tasks.add_task(process_ocr, correction_id, str(file_path))
-
-        logger.info(f"Uploaded file for correction {correction_id}: {file.filename}")
-
-        return CorrectionResponse(success=True, correction=correction)
-
-    except Exception as e:
-        logger.error(f"Upload error: {e}")
-        return CorrectionResponse(success=False, error=str(e))
-
-
-@router.get("/{correction_id}", response_model=CorrectionResponse)
-async def get_correction(correction_id: str):
-    """Fetches a correction."""
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    return CorrectionResponse(success=True, correction=correction)
-
-
-@router.get("/", response_model=Dict[str, Any])
-async def list_corrections(
-    class_name: Optional[str] = None,
-    status: Optional[CorrectionStatus] = None,
-    limit: int = 50
-):
-    """Lists corrections, optionally filtered."""
-    corrections = list(corrections_store.values())
-
-    if class_name:
-        corrections = [c for c in corrections if c.class_name == class_name]
-
-    if status:
-        corrections = [c for c in corrections if c.status == status]
-
-    # Sort by creation date (newest first)
-    corrections.sort(key=lambda x: x.created_at, reverse=True)
-
-    return {
-        "total": len(corrections),
-        "corrections": [c.dict() for c in corrections[:limit]]
-    }
-
-
-@router.post("/{correction_id}/analyze", response_model=AnalysisResponse)
-async def analyze_correction(
-    correction_id: str,
-    expected_answers: Optional[Dict[str, str]] = None
-):
-    """
-    Analyzes the extracted answers.
-
-    Optionally with a model solution for automatic grading.
-    """
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    if correction.status not in [CorrectionStatus.OCR_COMPLETE, CorrectionStatus.ANALYZED]:
-        raise HTTPException(
-            status_code=400,
-            detail=f"Korrektur im falschen Status: {correction.status}"
-        )
-
-    if not correction.extracted_text:
-        raise HTTPException(status_code=400, detail="Kein extrahierter Text vorhanden")
-
-    try:
-        correction.status = CorrectionStatus.ANALYZING
-        corrections_store[correction_id] = correction
-
-        # Simple analysis without an LLM
-        # Split the text into sections (simulates task detection)
-        text_parts = correction.extracted_text.split('\n\n')
-        evaluations = []
-
-        for i, part in enumerate(text_parts[:10], start=1):  # max 10 tasks
-            if len(part.strip()) < 5:
-                continue
-
-            # Simulated grading
-            # In production an LLM-based analysis would run here
-            expected = expected_answers.get(str(i), "") if expected_answers else ""
-
-            # Simple text comparison (in production: semantic comparison)
-            is_correct = bool(expected and expected.lower() in part.lower())
-            points = correction.max_points / len(text_parts) if text_parts else 0
-
-            evaluation = AnswerEvaluation(
-                question_number=i,
-                extracted_text=part[:200],  # truncated for the response
-                points_possible=points,
-                points_awarded=points if is_correct else points * 0.5,  # partial credit
-                feedback=f"Antwort zu Aufgabe {i}" + (" korrekt." if is_correct else " mit Verbesserungsbedarf."),
-                is_correct=is_correct,
-                confidence=0.7  # simulated confidence
-            )
-            evaluations.append(evaluation)
-
-        # Compute the overall result
-        total_points = sum(e.points_awarded for e in evaluations)
-        percentage = (total_points / correction.max_points * 100) if correction.max_points > 0 else 0
-        suggested_grade = calculate_grade(percentage)
-
-        # Generate feedback
-        ai_feedback = generate_ai_feedback(
-            evaluations, total_points, correction.max_points, correction.subject
-        )
-
-        # Update the correction
-        correction.evaluations = evaluations
-        correction.total_points = total_points
-        correction.percentage = percentage
-        correction.grade = suggested_grade
-        correction.ai_feedback = ai_feedback
-        correction.status = CorrectionStatus.ANALYZED
-        correction.updated_at = datetime.utcnow()
-        corrections_store[correction_id] = correction
-
-        logger.info(f"Analysis complete for {correction_id}: {total_points}/{correction.max_points}")
-
-        return AnalysisResponse(
-            success=True,
-            evaluations=evaluations,
-            total_points=total_points,
-            percentage=percentage,
-            suggested_grade=suggested_grade,
-            ai_feedback=ai_feedback
-        )
-
-    except Exception as e:
-        logger.error(f"Analysis error: {e}")
-        correction.status = CorrectionStatus.ERROR
-        corrections_store[correction_id] = correction
-        return AnalysisResponse(success=False, error=str(e))
-
-
-@router.put("/{correction_id}", response_model=CorrectionResponse)
-async def update_correction(correction_id: str, data: CorrectionUpdate):
-    """
-    Updates a correction.
-
-    Allows manual adjustments by the teacher.
-    """
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    if data.evaluations is not None:
-        correction.evaluations = data.evaluations
-        correction.total_points = sum(e.points_awarded for e in data.evaluations)
-        correction.percentage = (
-            correction.total_points / correction.max_points * 100
-        ) if correction.max_points > 0 else 0
-
-    if data.total_points is not None:
-        correction.total_points = data.total_points
-        correction.percentage = (
-            data.total_points / correction.max_points * 100
-        ) if correction.max_points > 0 else 0
-
-    if data.grade is not None:
-        correction.grade = data.grade
-
-    if data.teacher_notes is not None:
-        correction.teacher_notes = data.teacher_notes
-
-    if data.status is not None:
-        correction.status = data.status
-
-    correction.updated_at = datetime.utcnow()
-    corrections_store[correction_id] = correction
-
-    return CorrectionResponse(success=True, correction=correction)
-
-
-@router.post("/{correction_id}/complete", response_model=CorrectionResponse)
-async def complete_correction(correction_id: str):
-    """Marks a correction as completed."""
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    correction.status = CorrectionStatus.COMPLETED
-    correction.updated_at = datetime.utcnow()
-    corrections_store[correction_id] = correction
-
-    logger.info(f"Correction {correction_id} completed: {correction.grade}")
-
-    return CorrectionResponse(success=True, correction=correction)
-
-
-@router.get("/{correction_id}/export-pdf")
-async def export_correction_pdf(correction_id: str):
-    """
-    Exports the corrected test as a PDF.
-
-    Contains:
-    - the original scan
-    - evaluations
-    - feedback
-    - the overall result
-    """
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    try:
-        pdf_service = PDFService()
-
-        # Build the CorrectionData
-        correction_data = CorrectionData(
-            student=StudentInfo(
-                student_id=correction.student_id,
-                name=correction.student_name,
-                class_name=correction.class_name
-            ),
-            exam_title=correction.exam_title,
-            subject=correction.subject,
-            date=correction.created_at.strftime("%d.%m.%Y"),
-            max_points=correction.max_points,
-            achieved_points=correction.total_points,
-            grade=correction.grade or "",
-            percentage=correction.percentage,
-            corrections=[
-                {
-                    "question": f"Aufgabe {e.question_number}",
-                    "answer": e.extracted_text,
-                    "points": f"{e.points_awarded}/{e.points_possible}",
-                    "feedback": e.feedback
-                }
-                for e in correction.evaluations
-            ],
-            teacher_notes=correction.teacher_notes or "",
-            ai_feedback=correction.ai_feedback or ""
-        )
-
-        # Generate the PDF
-        pdf_bytes = pdf_service.generate_correction_pdf(correction_data)
-
-        from fastapi.responses import Response
-
-        return Response(
-            content=pdf_bytes,
-            media_type="application/pdf",
-            headers={
-                "Content-Disposition": f'attachment; filename="korrektur_{correction.student_name}_{correction.exam_title}.pdf"'
-            }
-        )
-
-    except Exception as e:
-        logger.error(f"PDF export error: {e}")
-        raise HTTPException(status_code=500, detail=f"PDF-Export fehlgeschlagen: {str(e)}")
-
-
-@router.delete("/{correction_id}")
-async def delete_correction(correction_id: str):
-    """Deletes a correction."""
-    if correction_id not in corrections_store:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    correction = corrections_store[correction_id]
-
-    # Also delete the uploaded file
-    if correction.file_path and os.path.exists(correction.file_path):
-        try:
-            os.remove(correction.file_path)
-        except Exception as e:
-            logger.warning(f"Could not delete file {correction.file_path}: {e}")
-
-    del corrections_store[correction_id]
-    logger.info(f"Deleted correction {correction_id}")
-
-    return {"status": "deleted", "id": correction_id}
-
-
-@router.get("/class/{class_name}/summary")
-async def get_class_summary(class_name: str):
-    """
-    Returns a summary for a class.
-
-    Contains statistics over all completed corrections of the class.
-    """
-    class_corrections = [
-        c for c in corrections_store.values()
-        if c.class_name == class_name and c.status == CorrectionStatus.COMPLETED
-    ]
-
-    if not class_corrections:
-        return {
-            "class_name": class_name,
-            "total_students": 0,
-            "average_percentage": 0,
-            "grade_distribution": {},
-            "corrections": []
-        }
-
-    # Compute statistics
-    percentages = [c.percentage for c in class_corrections]
-    average_percentage = sum(percentages) / len(percentages) if percentages else 0
-
-    # Grade distribution
-    grade_distribution = {}
-    for c in class_corrections:
-        grade = c.grade or "?"
-        grade_distribution[grade] = grade_distribution.get(grade, 0) + 1
-
-    return {
-        "class_name": class_name,
-        "total_students": len(class_corrections),
-        "average_percentage": round(average_percentage, 1),
-        "average_points": round(
-            sum(c.total_points for c in class_corrections) / len(class_corrections), 1
-        ),
-        "grade_distribution": grade_distribution,
-        "corrections": [
-            {
-                "id": c.id,
-                "student_name": c.student_name,
-                "total_points": c.total_points,
-                "percentage": c.percentage,
-                "grade": c.grade
-            }
-            for c in sorted(class_corrections, key=lambda x: x.student_name)
-        ]
-    }
-
-
-@router.post("/{correction_id}/ocr/retry", response_model=CorrectionResponse)
-async def retry_ocr(correction_id: str, background_tasks: BackgroundTasks):
-    """
-    Re-runs OCR processing.
-
-    Useful when the first run failed.
-    """
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        raise HTTPException(status_code=404, detail="Korrektur nicht gefunden")
-
-    if not correction.file_path:
-        raise HTTPException(status_code=400, detail="Keine Datei vorhanden")
-
-    if not os.path.exists(correction.file_path):
-        raise HTTPException(status_code=400, detail="Datei nicht mehr vorhanden")
-
-    # Restart OCR
-    correction.status = CorrectionStatus.UPLOADED
-    correction.extracted_text = None
-    correction.updated_at = datetime.utcnow()
-    corrections_store[correction_id] = correction
-
-    background_tasks.add_task(process_ocr, correction_id, correction.file_path)
-
-    return CorrectionResponse(success=True, correction=correction)
+# Backward-compat shim -- module moved to correction/endpoints.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("correction.endpoints")
@@ -1,134 +1,4 @@
-"""
-Correction API - Helper functions for grading, feedback, and OCR processing.
-"""
-
-import logging
-from typing import List, Dict
-
-from correction_models import AnswerEvaluation, CorrectionStatus, Correction
-
-logger = logging.getLogger(__name__)
-
-# FileProcessor requires OpenCV with libGL - make optional for CI
-try:
-    from services.file_processor import FileProcessor, ProcessingResult
-    _ocr_available = True
-except (ImportError, OSError):
-    FileProcessor = None  # type: ignore
-    ProcessingResult = None  # type: ignore
-    _ocr_available = False
-
-# PDF service requires WeasyPrint with system libraries - make optional for CI
-try:
-    from services.pdf_service import PDFService, CorrectionData, StudentInfo
-    _pdf_available = True
-except (ImportError, OSError):
-    PDFService = None  # type: ignore
-    CorrectionData = None  # type: ignore
-    StudentInfo = None  # type: ignore
-    _pdf_available = False
-
-
-# ============================================================================
-# In-Memory Storage (replace with a DB later)
-# ============================================================================
-
-corrections_store: Dict[str, Correction] = {}
-
-
-# ============================================================================
-# Helper Functions
-# ============================================================================
-
-def calculate_grade(percentage: float) -> str:
-    """Computes the grade from a percentage (German system)."""
-    if percentage >= 92:
-        return "1"
-    elif percentage >= 81:
-        return "2"
-    elif percentage >= 67:
-        return "3"
-    elif percentage >= 50:
-        return "4"
-    elif percentage >= 30:
-        return "5"
-    else:
-        return "6"
-
-
-def generate_ai_feedback(
-    evaluations: List[AnswerEvaluation],
-    total_points: float,
-    max_points: float,
-    subject: str
-) -> str:
-    """Generates AI feedback based on the evaluation."""
-    # Without an LLM: simple template-based feedback
-    percentage = (total_points / max_points * 100) if max_points > 0 else 0
-    correct_count = sum(1 for e in evaluations if e.is_correct)
-    total_count = len(evaluations)
-
-    if percentage >= 90:
-        intro = "Hervorragende Leistung!"
-    elif percentage >= 75:
-        intro = "Gute Arbeit!"
-    elif percentage >= 60:
-        intro = "Insgesamt eine solide Leistung."
-    elif percentage >= 50:
-        intro = "Die Arbeit zeigt Grundkenntnisse, aber es gibt Verbesserungsbedarf."
-    else:
-        intro = "Es sind deutliche Wissensluecken erkennbar."
-
-    # Find areas for improvement
-    weak_areas = [e for e in evaluations if not e.is_correct]
-    strengths = [e for e in evaluations if e.is_correct and e.confidence > 0.8]
-
-    feedback_parts = [intro]
-
-    if strengths:
-        feedback_parts.append(
-            f"Besonders gut geloest: Aufgabe(n) {', '.join(str(s.question_number) for s in strengths[:3])}."
-        )
-
-    if weak_areas:
-        feedback_parts.append(
-            f"Uebungsbedarf bei: Aufgabe(n) {', '.join(str(w.question_number) for w in weak_areas[:3])}."
-        )
-
-    feedback_parts.append(
-        f"Ergebnis: {correct_count} von {total_count} Aufgaben korrekt ({percentage:.1f}%)."
-    )
-
-    return " ".join(feedback_parts)
-
-
-async def process_ocr(correction_id: str, file_path: str):
-    """Background task for OCR processing."""
-    from datetime import datetime
-
-    correction = corrections_store.get(correction_id)
-    if not correction:
-        return
-
-    try:
-        correction.status = CorrectionStatus.PROCESSING
-        corrections_store[correction_id] = correction
-
-        # Run OCR
-        processor = FileProcessor()
-        result = processor.process_file(file_path)
-
-        if result.success and result.text:
-            correction.extracted_text = result.text
-            correction.status = CorrectionStatus.OCR_COMPLETE
-        else:
-            correction.status = CorrectionStatus.ERROR
-
-        correction.updated_at = datetime.utcnow()
-        corrections_store[correction_id] = correction
-
-    except Exception as e:
-        logger.error(f"OCR error for {correction_id}: {e}")
-        correction.status = CorrectionStatus.ERROR
-        correction.updated_at = datetime.utcnow()
-        corrections_store[correction_id] = correction
+# Backward-compat shim -- module moved to correction/helpers.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("correction.helpers")
@@ -1,111 +1,4 @@
-"""
-Correction API - Pydantic models and enums.
-"""
-
-from datetime import datetime
-from typing import List, Dict, Any, Optional
-from enum import Enum
-from pathlib import Path
-
-from pydantic import BaseModel, Field
-
-
-# Upload directory
-UPLOAD_DIR = Path("/tmp/corrections")
-UPLOAD_DIR.mkdir(parents=True, exist_ok=True)
-
-
-# ============================================================================
-# Enums and Models
-# ============================================================================
-
-class CorrectionStatus(str, Enum):
-    """Status of a correction."""
-    UPLOADED = "uploaded"          # file uploaded
-    PROCESSING = "processing"      # OCR running
-    OCR_COMPLETE = "ocr_complete"  # OCR finished
-    ANALYZING = "analyzing"        # analysis running
-    ANALYZED = "analyzed"          # analysis finished
-    REVIEWING = "reviewing"        # teacher reviewing
-    COMPLETED = "completed"        # correction finished
-    ERROR = "error"                # an error occurred
-
-
-class AnswerEvaluation(BaseModel):
-    """Evaluation of a single answer."""
-    question_number: int
-    extracted_text: str
-    points_possible: float
-    points_awarded: float
-    feedback: str
-    is_correct: bool
-    confidence: float  # 0-1, how confident the OCR/analysis is
-
-
-class CorrectionCreate(BaseModel):
-    """Request for creating a new correction."""
-    student_id: str
-    student_name: str
-    class_name: str
-    exam_title: str
-    subject: str
-    max_points: float = Field(default=100.0, ge=0)
-    expected_answers: Optional[Dict[str, str]] = None  # model solution
-
-
-class CorrectionUpdate(BaseModel):
-    """Request for updating a correction."""
-    evaluations: Optional[List[AnswerEvaluation]] = None
-    total_points: Optional[float] = None
-    grade: Optional[str] = None
-    teacher_notes: Optional[str] = None
-    status: Optional[CorrectionStatus] = None
-
-
-class Correction(BaseModel):
-    """A correction."""
-    id: str
-    student_id: str
-    student_name: str
-    class_name: str
-    exam_title: str
-    subject: str
-    max_points: float
-    total_points: float = 0.0
-    percentage: float = 0.0
-    grade: Optional[str] = None
-    status: CorrectionStatus
-    file_path: Optional[str] = None
-    extracted_text: Optional[str] = None
-    evaluations: List[AnswerEvaluation] = []
-    teacher_notes: Optional[str] = None
-    ai_feedback: Optional[str] = None
-    created_at: datetime
-    updated_at: datetime
-
-
-class CorrectionResponse(BaseModel):
-    """Response for a correction."""
-    success: bool
-    correction: Optional[Correction] = None
-    error: Optional[str] = None
-
-
-class OCRResponse(BaseModel):
-    """Response for an OCR result."""
-    success: bool
-    extracted_text: Optional[str] = None
-    regions: List[Dict[str, Any]] = []
-    confidence: float = 0.0
-    error: Optional[str] = None
-
-
-class AnalysisResponse(BaseModel):
-    """Response for an analysis result."""
-    success: bool
-    evaluations: List[AnswerEvaluation] = []
-    total_points: float = 0.0
-    percentage: float = 0.0
-    suggested_grade: Optional[str] = None
-    ai_feedback: Optional[str] = None
-    error: Optional[str] = None
+# Backward-compat shim -- module moved to correction/models.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("correction.models")
@@ -0,0 +1 @@
# dashboard — Teacher dashboard, unit assignments, analytics.
@@ -0,0 +1,267 @@
# ==============================================
# Teacher Dashboard - Analytics & Progress Routes
# ==============================================

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import logging

from .models import (
    UnitAssignmentStatus, TeacherControlSettings,
    UnitAssignment, StudentUnitProgress, ClassUnitProgress,
    MisconceptionReport, ClassAnalyticsSummary, ContentResource,
    get_current_teacher, get_teacher_database,
    get_classes_for_teacher, get_students_in_class,
    REQUIRE_AUTH,
)

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Teacher Dashboard"])

# Shared in-memory store reference (set from teacher_dashboard_api)
_assignments_store: Dict[str, Dict[str, Any]] = {}


def set_assignments_store(store: Dict[str, Dict[str, Any]]):
    """Share the in-memory assignments store from the main module."""
    global _assignments_store
    _assignments_store = store


# ==============================================
# API Endpoints - Progress & Analytics
# ==============================================

@router.get("/assignments/{assignment_id}/progress", response_model=ClassUnitProgress)
async def get_assignment_progress(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> ClassUnitProgress:
    """Get detailed progress for an assignment."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    students = await get_students_in_class(assignment["class_id"])
    student_progress = []
    total_completion = 0.0
    total_precheck = 0.0
    total_postcheck = 0.0
    total_time = 0
    precheck_count = 0
    postcheck_count = 0
    started = 0
    completed = 0

    for student in students:
        student_id = student.get("id", student.get("student_id"))
        progress = StudentUnitProgress(
            student_id=student_id,
            student_name=student.get("name", f"Student {student_id[:8]}"),
            status="not_started", completion_rate=0.0, stops_completed=0, total_stops=0,
        )
        if db:
            try:
                session_data = await db.get_student_unit_session(
                    student_id=student_id, unit_id=assignment["unit_id"]
                )
                if session_data:
                    progress.session_id = session_data.get("session_id")
                    progress.status = "completed" if session_data.get("completed_at") else "in_progress"
                    progress.completion_rate = session_data.get("completion_rate", 0.0)
                    progress.precheck_score = session_data.get("precheck_score")
                    progress.postcheck_score = session_data.get("postcheck_score")
                    progress.time_spent_minutes = session_data.get("duration_seconds", 0) // 60
                    progress.last_activity = session_data.get("updated_at")
                    progress.stops_completed = session_data.get("stops_completed", 0)
                    progress.total_stops = session_data.get("total_stops", 0)
                    if progress.precheck_score is not None and progress.postcheck_score is not None:
                        progress.learning_gain = progress.postcheck_score - progress.precheck_score
                    total_completion += progress.completion_rate
                    total_time += progress.time_spent_minutes
                    if progress.precheck_score is not None:
                        total_precheck += progress.precheck_score
                        precheck_count += 1
                    if progress.postcheck_score is not None:
                        total_postcheck += progress.postcheck_score
                        postcheck_count += 1
                    if progress.status != "not_started":
                        started += 1
                    if progress.status == "completed":
                        completed += 1
            except Exception as e:
                logger.error(f"Failed to get student progress: {e}")
        student_progress.append(progress)

    total_students = len(students) or 1
    return ClassUnitProgress(
        assignment_id=assignment_id, unit_id=assignment["unit_id"],
        unit_title=f"Unit {assignment['unit_id']}", class_id=assignment["class_id"],
        class_name=f"Class {assignment['class_id'][:8]}", total_students=len(students),
        started_count=started, completed_count=completed,
        avg_completion_rate=total_completion / total_students,
        avg_precheck_score=total_precheck / precheck_count if precheck_count > 0 else None,
        avg_postcheck_score=total_postcheck / postcheck_count if postcheck_count > 0 else None,
        avg_learning_gain=(total_postcheck / postcheck_count - total_precheck / precheck_count)
        if precheck_count > 0 and postcheck_count > 0 else None,
        avg_time_minutes=total_time / started if started > 0 else 0,
        students=student_progress,
    )


@router.get("/classes/{class_id}/analytics", response_model=ClassAnalyticsSummary)
async def get_class_analytics(
    class_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> ClassAnalyticsSummary:
    """Get summary analytics for a class."""
    db = await get_teacher_database()
    assignments = []
    if db:
        try:
            assignments = await db.list_assignments(teacher_id=teacher["user_id"], class_id=class_id)
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")
    if not assignments:
        assignments = [
            a for a in _assignments_store.values()
            if a["class_id"] == class_id and a["teacher_id"] == teacher["user_id"]
        ]

    total_units = len(assignments)
    completed_units = sum(1 for a in assignments if a.get("status") == "completed")
    active_units = sum(1 for a in assignments if a.get("status") == "active")

    students = await get_students_in_class(class_id)
    student_scores = {}
    misconceptions = []
    if db:
        try:
            for student in students:
                student_id = student.get("id", student.get("student_id"))
                analytics = await db.get_student_analytics(student_id)
                if analytics:
                    student_scores[student_id] = {
                        "name": student.get("name", student_id[:8]),
                        "avg_score": analytics.get("avg_postcheck_score", 0),
                        "total_time": analytics.get("total_time_minutes", 0),
                    }
            misconceptions_data = await db.get_class_misconceptions(class_id)
            for m in misconceptions_data:
                misconceptions.append(MisconceptionReport(
                    concept_id=m["concept_id"], concept_label=m["concept_label"],
                    misconception=m["misconception"], affected_students=m["affected_students"],
                    frequency=m["frequency"], unit_id=m["unit_id"], stop_id=m["stop_id"],
                ))
        except Exception as e:
            logger.error(f"Failed to aggregate analytics: {e}")

    sorted_students = sorted(student_scores.items(), key=lambda x: x[1]["avg_score"], reverse=True)
    top_performers = [s[1]["name"] for s in sorted_students[:3]]
    struggling_students = [s[1]["name"] for s in sorted_students[-3:] if s[1]["avg_score"] < 0.6]
    total_time = sum(s["total_time"] for s in student_scores.values())
    avg_scores = [s["avg_score"] for s in student_scores.values() if s["avg_score"] > 0]
    avg_completion = sum(avg_scores) / len(avg_scores) if avg_scores else 0

    return ClassAnalyticsSummary(
        class_id=class_id, class_name=f"Klasse {class_id[:8]}",
        total_units_assigned=total_units, units_completed=completed_units,
        active_units=active_units, avg_completion_rate=avg_completion,
        avg_learning_gain=None, total_time_hours=total_time / 60,
        top_performers=top_performers, struggling_students=struggling_students,
        common_misconceptions=misconceptions[:5],
    )


@router.get("/students/{student_id}/progress")
async def get_student_progress(
    student_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Get detailed progress for a specific student."""
    db = await get_teacher_database()
    if db:
        try:
            progress = await db.get_student_full_progress(student_id)
            return progress
        except Exception as e:
            logger.error(f"Failed to get student progress: {e}")
    return {
        "student_id": student_id, "units_attempted": 0, "units_completed": 0,
        "avg_score": 0.0, "total_time_minutes": 0, "sessions": [],
    }


# ==============================================
# API Endpoints - Content Resources
# ==============================================

@router.get("/assignments/{assignment_id}/resources", response_model=List[ContentResource])
async def get_assignment_resources(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher),
    request: Request = None
) -> List[ContentResource]:
    """Get generated content resources for an assignment."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    unit_id = assignment["unit_id"]
    base_url = str(request.base_url).rstrip("/") if request else "http://localhost:8000"
    return [
        ContentResource(resource_type="h5p", title=f"{unit_id} - H5P Aktivitaeten",
                        url=f"{base_url}/api/units/content/{unit_id}/h5p",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
        ContentResource(resource_type="worksheet", title=f"{unit_id} - Arbeitsblatt (HTML)",
                        url=f"{base_url}/api/units/content/{unit_id}/worksheet",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
        ContentResource(resource_type="pdf", title=f"{unit_id} - Arbeitsblatt (PDF)",
                        url=f"{base_url}/api/units/content/{unit_id}/worksheet.pdf",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
    ]


@router.post("/assignments/{assignment_id}/regenerate-content")
async def regenerate_content(
    assignment_id: str,
    resource_type: str = Query("all", description="h5p, pdf, or all"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Trigger regeneration of content resources."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    logger.info(f"Content regeneration triggered for {assignment['unit_id']}: {resource_type}")
    return {
        "status": "queued", "assignment_id": assignment_id,
        "unit_id": assignment["unit_id"], "resource_type": resource_type,
        "message": "Content regeneration has been queued",
    }
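Note that the class-level learning gain above is the difference of the pre- and post-check averages, not the average of per-student gains; the two only coincide when every student has both scores. A worked sketch with invented numbers:

    pre = [0.4, 0.6]          # precheck scores
    post = [0.7, 0.9]         # postcheck scores of the same two students
    gain = sum(post) / len(post) - sum(pre) / len(pre)
    assert abs(gain - 0.3) < 1e-9   # 0.8 - 0.5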
@@ -0,0 +1,329 @@
# ==============================================
# Breakpilot Drive - Teacher Dashboard API
# ==============================================
# Teacher dashboard for unit assignment and analytics.
#
# Split structure:
# - teacher_dashboard_models.py: Models, Auth, DB/School helpers
# - teacher_dashboard_analytics.py: Progress, analytics, content routes
# - teacher_dashboard_api.py: Assignment CRUD, dashboard, units (this file)

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import uuid
import logging

from .models import (
    UnitAssignmentStatus, TeacherControlSettings, AssignUnitRequest,
    UnitAssignment,
    get_current_teacher, get_teacher_database,
    get_classes_for_teacher,
    REQUIRE_AUTH,
)
from .analytics import (
    router as analytics_router,
    set_assignments_store,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/teacher", tags=["Teacher Dashboard"])

# In-memory storage (fallback)
_assignments_store: Dict[str, Dict[str, Any]] = {}

# Share the store with the analytics module and include its routes
set_assignments_store(_assignments_store)
router.include_router(analytics_router)


# ==============================================
# API Endpoints - Unit Assignment
# ==============================================

@router.post("/assignments", response_model=UnitAssignment)
async def assign_unit_to_class(
    request_data: AssignUnitRequest,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Assign a unit to a class."""
    assignment_id = str(uuid.uuid4())
    now = datetime.utcnow()
    settings = request_data.settings or TeacherControlSettings()

    assignment = {
        "assignment_id": assignment_id, "unit_id": request_data.unit_id,
        "class_id": request_data.class_id, "teacher_id": teacher["user_id"],
        "status": UnitAssignmentStatus.ACTIVE, "settings": settings.model_dump(),
        "due_date": request_data.due_date, "notes": request_data.notes,
        "created_at": now, "updated_at": now,
    }

    db = await get_teacher_database()
    if db:
        try:
            await db.create_assignment(assignment)
        except Exception as e:
            logger.error(f"Failed to store assignment: {e}")

    _assignments_store[assignment_id] = assignment
    logger.info(f"Unit {request_data.unit_id} assigned to class {request_data.class_id}")

    return UnitAssignment(
        assignment_id=assignment_id, unit_id=request_data.unit_id,
        class_id=request_data.class_id, teacher_id=teacher["user_id"],
        status=UnitAssignmentStatus.ACTIVE, settings=settings,
        due_date=request_data.due_date, notes=request_data.notes,
        created_at=now, updated_at=now,
    )


@router.get("/assignments", response_model=List[UnitAssignment])
async def list_assignments(
    class_id: Optional[str] = Query(None, description="Filter by class"),
    status: Optional[UnitAssignmentStatus] = Query(None, description="Filter by status"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> List[UnitAssignment]:
    """List all unit assignments for the teacher."""
    db = await get_teacher_database()
    assignments = []

    if db:
        try:
            assignments = await db.list_assignments(
                teacher_id=teacher["user_id"],
                class_id=class_id,
                status=status.value if status else None
            )
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")

    if not assignments:
        for assignment in _assignments_store.values():
            if assignment["teacher_id"] != teacher["user_id"]:
                continue
            if class_id and assignment["class_id"] != class_id:
                continue
            if status and assignment["status"] != status.value:
                continue
            assignments.append(assignment)

    return [
        UnitAssignment(
            assignment_id=a["assignment_id"], unit_id=a["unit_id"],
            class_id=a["class_id"], teacher_id=a["teacher_id"],
            status=a["status"], settings=TeacherControlSettings(**a["settings"]),
            due_date=a.get("due_date"), notes=a.get("notes"),
            created_at=a["created_at"], updated_at=a["updated_at"],
        )
        for a in assignments
    ]


@router.get("/assignments/{assignment_id}", response_model=UnitAssignment)
async def get_assignment(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Get details of a specific assignment."""
    db = await get_teacher_database()
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
            if assignment and assignment["teacher_id"] == teacher["user_id"]:
                return UnitAssignment(
                    assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
                    class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
                    status=assignment["status"],
                    settings=TeacherControlSettings(**assignment["settings"]),
                    due_date=assignment.get("due_date"), notes=assignment.get("notes"),
                    created_at=assignment["created_at"], updated_at=assignment["updated_at"],
                )
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")

    if assignment_id in _assignments_store:
        a = _assignments_store[assignment_id]
        if a["teacher_id"] == teacher["user_id"]:
            return UnitAssignment(
                assignment_id=a["assignment_id"], unit_id=a["unit_id"],
                class_id=a["class_id"], teacher_id=a["teacher_id"],
                status=a["status"], settings=TeacherControlSettings(**a["settings"]),
                due_date=a.get("due_date"), notes=a.get("notes"),
                created_at=a["created_at"], updated_at=a["updated_at"],
            )

    raise HTTPException(status_code=404, detail="Assignment not found")


@router.put("/assignments/{assignment_id}")
async def update_assignment(
    assignment_id: str,
    settings: Optional[TeacherControlSettings] = None,
    status: Optional[UnitAssignmentStatus] = None,
    due_date: Optional[datetime] = None,
    notes: Optional[str] = None,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Update assignment settings or status."""
    db = await get_teacher_database()
    assignment = None

    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")

    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]

    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    if settings:
        assignment["settings"] = settings.model_dump()
    if status:
        assignment["status"] = status.value
    if due_date:
|
||||||
|
assignment["due_date"] = due_date
|
||||||
|
if notes is not None:
|
||||||
|
assignment["notes"] = notes
|
||||||
|
assignment["updated_at"] = datetime.utcnow()
|
||||||
|
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
await db.update_assignment(assignment)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to update assignment: {e}")
|
||||||
|
|
||||||
|
_assignments_store[assignment_id] = assignment
|
||||||
|
|
||||||
|
return UnitAssignment(
|
||||||
|
assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
|
||||||
|
class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
|
||||||
|
status=assignment["status"], settings=TeacherControlSettings(**assignment["settings"]),
|
||||||
|
due_date=assignment.get("due_date"), notes=assignment.get("notes"),
|
||||||
|
created_at=assignment["created_at"], updated_at=assignment["updated_at"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/assignments/{assignment_id}")
|
||||||
|
async def delete_assignment(
|
||||||
|
assignment_id: str,
|
||||||
|
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||||
|
) -> Dict[str, str]:
|
||||||
|
"""Delete/archive an assignment."""
|
||||||
|
db = await get_teacher_database()
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
assignment = await db.get_assignment(assignment_id)
|
||||||
|
if assignment and assignment["teacher_id"] == teacher["user_id"]:
|
||||||
|
await db.delete_assignment(assignment_id)
|
||||||
|
if assignment_id in _assignments_store:
|
||||||
|
del _assignments_store[assignment_id]
|
||||||
|
return {"status": "deleted", "assignment_id": assignment_id}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to delete assignment: {e}")
|
||||||
|
|
||||||
|
if assignment_id in _assignments_store:
|
||||||
|
a = _assignments_store[assignment_id]
|
||||||
|
if a["teacher_id"] == teacher["user_id"]:
|
||||||
|
del _assignments_store[assignment_id]
|
||||||
|
return {"status": "deleted", "assignment_id": assignment_id}
|
||||||
|
|
||||||
|
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================
|
||||||
|
# API Endpoints - Available Units
|
||||||
|
# ==============================================
|
||||||
|
|
||||||
|
@router.get("/units/available")
|
||||||
|
async def list_available_units(
|
||||||
|
grade: Optional[str] = Query(None, description="Filter by grade level"),
|
||||||
|
template: Optional[str] = Query(None, description="Filter by template type"),
|
||||||
|
locale: str = Query("de-DE", description="Locale"),
|
||||||
|
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""List all available units for assignment."""
|
||||||
|
db = await get_teacher_database()
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
units = await db.list_available_units(grade=grade, template=template, locale=locale)
|
||||||
|
return units
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to list units: {e}")
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"unit_id": "bio_eye_lightpath_v1", "title": "Auge - Lichtstrahl-Flug",
|
||||||
|
"template": "flight_path", "grade_band": ["5", "6", "7"],
|
||||||
|
"duration_minutes": 8, "difficulty": "base",
|
||||||
|
"description": "Reise durch das Auge und folge dem Lichtstrahl",
|
||||||
|
"learning_objectives": ["Verstehen des Lichtwegs durch das Auge",
|
||||||
|
"Funktionen der Augenbestandteile benennen"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"unit_id": "math_pizza_equivalence_v1",
|
||||||
|
"title": "Pizza-Boxenstopp - Brueche und Prozent",
|
||||||
|
"template": "station_loop", "grade_band": ["5", "6"],
|
||||||
|
"duration_minutes": 10, "difficulty": "base",
|
||||||
|
"description": "Entdecke die Verbindung zwischen Bruechen, Dezimalzahlen und Prozent",
|
||||||
|
"learning_objectives": ["Brueche in Prozent umrechnen", "Aequivalenzen erkennen"],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================
|
||||||
|
# API Endpoints - Dashboard Overview
|
||||||
|
# ==============================================
|
||||||
|
|
||||||
|
@router.get("/dashboard")
|
||||||
|
async def get_dashboard(
|
||||||
|
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get teacher dashboard overview."""
|
||||||
|
db = await get_teacher_database()
|
||||||
|
classes = await get_classes_for_teacher(teacher["user_id"])
|
||||||
|
|
||||||
|
active_assignments = []
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
active_assignments = await db.list_assignments(
|
||||||
|
teacher_id=teacher["user_id"], status="active"
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to list assignments: {e}")
|
||||||
|
if not active_assignments:
|
||||||
|
active_assignments = [
|
||||||
|
a for a in _assignments_store.values()
|
||||||
|
if a["teacher_id"] == teacher["user_id"] and a.get("status") == "active"
|
||||||
|
]
|
||||||
|
|
||||||
|
alerts = []
|
||||||
|
for assignment in active_assignments:
|
||||||
|
if assignment.get("due_date") and assignment["due_date"] < datetime.utcnow() + timedelta(days=2):
|
||||||
|
alerts.append({
|
||||||
|
"type": "due_soon", "assignment_id": assignment["assignment_id"],
|
||||||
|
"message": "Zuweisung endet in weniger als 2 Tagen",
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
"teacher": {"id": teacher["user_id"], "name": teacher.get("name", "Lehrer"),
|
||||||
|
"email": teacher.get("email")},
|
||||||
|
"classes": len(classes), "active_assignments": len(active_assignments),
|
||||||
|
"total_students": sum(c.get("student_count", 0) for c in classes),
|
||||||
|
"alerts": alerts, "recent_activity": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/health")
|
||||||
|
async def health_check() -> Dict[str, Any]:
|
||||||
|
"""Health check for teacher dashboard API."""
|
||||||
|
db = await get_teacher_database()
|
||||||
|
db_status = "connected" if db else "in-memory"
|
||||||
|
return {
|
||||||
|
"status": "healthy", "service": "teacher-dashboard",
|
||||||
|
"database": db_status, "auth_required": REQUIRE_AUTH,
|
||||||
|
}
|
||||||
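For reference, a minimal client-side sketch of the assignment flow above; the base URL, bearer token, and class ID are illustrative assumptions, not values from this commit:

# Hypothetical client for POST /api/teacher/assignments (URL, token, IDs assumed).
import asyncio
import httpx

async def assign_demo_unit() -> None:
    payload = {
        "unit_id": "bio_eye_lightpath_v1",  # one of the fallback units above
        "class_id": "class-5b",             # assumed class ID
        "notes": "Week 12 homework",
        "settings": {"allow_skip": False, "max_time_per_stop_sec": 60},
    }
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # The Authorization header is only checked when TEACHER_REQUIRE_AUTH=true.
        resp = await client.post(
            "/api/teacher/assignments",
            json=payload,
            headers={"Authorization": "Bearer <teacher-jwt>"},
        )
        resp.raise_for_status()
        print(resp.json()["assignment_id"])

asyncio.run(assign_demo_unit())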
@@ -0,0 +1,226 @@
"""
Teacher Dashboard - Pydantic Models, Auth Dependency, and Service Helpers.
"""

import os
import logging
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum

from fastapi import HTTPException, Request
from pydantic import BaseModel
import httpx

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
REQUIRE_AUTH = os.getenv("TEACHER_REQUIRE_AUTH", "true").lower() == "true"
SCHOOL_SERVICE_URL = os.getenv("SCHOOL_SERVICE_URL", "http://school-service:8084")


# ==============================================
# Pydantic Models
# ==============================================

class UnitAssignmentStatus(str, Enum):
    """Status of a unit assignment"""
    DRAFT = "draft"
    ACTIVE = "active"
    COMPLETED = "completed"
    ARCHIVED = "archived"


class TeacherControlSettings(BaseModel):
    """Unit settings that teachers can configure"""
    allow_skip: bool = True
    allow_replay: bool = True
    max_time_per_stop_sec: int = 90
    show_hints: bool = True
    require_precheck: bool = True
    require_postcheck: bool = True


class AssignUnitRequest(BaseModel):
    """Request to assign a unit to a class"""
    unit_id: str
    class_id: str
    due_date: Optional[datetime] = None
    settings: Optional[TeacherControlSettings] = None
    notes: Optional[str] = None


class UnitAssignment(BaseModel):
    """Unit assignment record"""
    assignment_id: str
    unit_id: str
    class_id: str
    teacher_id: str
    status: UnitAssignmentStatus
    settings: TeacherControlSettings
    due_date: Optional[datetime] = None
    notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime


class StudentUnitProgress(BaseModel):
    """Progress of a single student on a unit"""
    student_id: str
    student_name: str
    session_id: Optional[str] = None
    status: str  # "not_started", "in_progress", "completed"
    completion_rate: float = 0.0
    precheck_score: Optional[float] = None
    postcheck_score: Optional[float] = None
    learning_gain: Optional[float] = None
    time_spent_minutes: int = 0
    last_activity: Optional[datetime] = None
    current_stop: Optional[str] = None
    stops_completed: int = 0
    total_stops: int = 0


class ClassUnitProgress(BaseModel):
    """Overall progress of a class on a unit"""
    assignment_id: str
    unit_id: str
    unit_title: str
    class_id: str
    class_name: str
    total_students: int
    started_count: int
    completed_count: int
    avg_completion_rate: float
    avg_precheck_score: Optional[float] = None
    avg_postcheck_score: Optional[float] = None
    avg_learning_gain: Optional[float] = None
    avg_time_minutes: float
    students: List[StudentUnitProgress]


class MisconceptionReport(BaseModel):
    """Report of detected misconceptions"""
    concept_id: str
    concept_label: str
    misconception: str
    affected_students: List[str]
    frequency: int
    unit_id: str
    stop_id: str


class ClassAnalyticsSummary(BaseModel):
    """Summary analytics for a class"""
    class_id: str
    class_name: str
    total_units_assigned: int
    units_completed: int
    active_units: int
    avg_completion_rate: float
    avg_learning_gain: Optional[float]
    total_time_hours: float
    top_performers: List[str]
    struggling_students: List[str]
    common_misconceptions: List[MisconceptionReport]


class ContentResource(BaseModel):
    """Generated content resource"""
    resource_type: str  # "h5p", "pdf", "worksheet"
    title: str
    url: str
    generated_at: datetime
    unit_id: str


# ==============================================
# Auth Dependency
# ==============================================

async def get_current_teacher(request: Request) -> Dict[str, Any]:
    """Get current teacher from JWT token."""
    if not REQUIRE_AUTH:
        return {
            "user_id": "e9484ad9-32ee-4f2b-a4e1-d182e02ccf20",
            "email": "demo@breakpilot.app",
            "role": "teacher",
            "name": "Demo Lehrer"
        }

    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing authorization token")

    try:
        import jwt
        token = auth_header[7:]
        secret = os.getenv("JWT_SECRET", "dev-secret-key")
        payload = jwt.decode(token, secret, algorithms=["HS256"])

        if payload.get("role") not in ["teacher", "admin"]:
            raise HTTPException(status_code=403, detail="Teacher or admin role required")

        return payload
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")


# ==============================================
# Database Integration
# ==============================================

_teacher_db = None


async def get_teacher_database():
    """Get teacher database instance with lazy initialization."""
    global _teacher_db
    if not USE_DATABASE:
        return None
    if _teacher_db is None:
        try:
            from unit.database import get_teacher_db
            _teacher_db = await get_teacher_db()
            logger.info("Teacher database initialized")
        except ImportError:
            logger.warning("Teacher database module not available")
        except Exception as e:
            logger.warning(f"Teacher database not available: {e}")
    return _teacher_db


# ==============================================
# School Service Integration
# ==============================================

async def get_classes_for_teacher(teacher_id: str) -> List[Dict[str, Any]]:
    """Get classes assigned to a teacher from school service."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(
                f"{SCHOOL_SERVICE_URL}/api/v1/school/classes",
                headers={"X-Teacher-ID": teacher_id}
            )
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get classes from school service: {e}")
    return []


async def get_students_in_class(class_id: str) -> List[Dict[str, Any]]:
    """Get students in a class from school service."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(
                f"{SCHOOL_SERVICE_URL}/api/v1/school/classes/{class_id}/students"
            )
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get students from school service: {e}")
    return []
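Since get_current_teacher verifies HS256 tokens with PyJWT, a dev token it accepts can be minted as below; the user_id is a placeholder, and JWT_SECRET defaults to "dev-secret-key" exactly as in the code above:

# Sketch: mint a dev token accepted by get_current_teacher (assumes PyJWT installed).
import datetime
import jwt  # PyJWT

payload = {
    "user_id": "teacher-123",  # placeholder ID
    "role": "teacher",         # must be "teacher" or "admin" to pass the role check
    "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=1),
}
token = jwt.encode(payload, "dev-secret-key", algorithm="HS256")
print(f"Authorization: Bearer {token}")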
@@ -0,0 +1,46 @@
# ==============================================
# Breakpilot Drive - Game API (barrel re-export)
# ==============================================
# This module was split into:
# - game_models.py (Pydantic models, difficulty mapping, sample questions)
# - game_routes.py (Core game routes: level, quiz, session, leaderboard)
# - game_extended_routes.py (Phase 5: achievements, progress, parent, class)
#
# The `router` object is assembled here by including all sub-routers.
# Importers that did `from game_api import router` continue to work.

from fastapi import APIRouter

from .routes import router as _core_router
from .session_routes import router as _session_router
from .extended_routes import router as _extended_router

# Re-export models for any direct importers
from .game_models import (  # noqa: F401
    LearningLevel,
    GameDifficulty,
    QuizQuestion,
    QuizAnswer,
    GameSession,
    SessionResponse,
    DIFFICULTY_MAPPING,
    SAMPLE_QUESTIONS,
)

# Re-export helpers/state for any direct importers
from .routes import (  # noqa: F401
    get_optional_current_user,
    get_user_id_from_auth,
    get_game_database,
    _sessions,
    _user_levels,
    USE_DATABASE,
    REQUIRE_AUTH,
)

# Assemble the combined router.
# All sub-routers use prefix="/api/game", so include them without an extra prefix.
router = APIRouter()
router.include_router(_core_router)
router.include_router(_session_router)
router.include_router(_extended_router)
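A minimal sketch of how a consumer mounts the combined router; the application module itself is an assumption, while the `from game_api import router` path is the one this barrel explicitly keeps working:

# Sketch: mounting the barrel router on an app (app module assumed).
from fastapi import FastAPI
from game_api import router  # legacy import path kept working by this barrel

app = FastAPI()
app.include_router(router)  # sub-routers already carry the /api/game prefix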
@@ -0,0 +1,189 @@
# ==============================================
# Breakpilot Drive - Game Extended Routes
# ==============================================
# Phase 5 features: achievements, progress, parent dashboard,
# class leaderboard, and display leaderboard.
# Extracted from game_api.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
import logging

from .routes import (
    get_optional_current_user,
    get_user_id_from_auth,
    get_game_database,
    REQUIRE_AUTH,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])


# ==============================================
# Phase 5: Extended Features
# ==============================================

@router.get("/achievements/{user_id}")
async def get_achievements(
    user_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> dict:
    """
    Returns achievements with progress for a user.

    Achievements are computed from game statistics.
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    db = await get_game_database()
    if not db:
        return {"achievements": [], "message": "Database not available"}

    try:
        achievements = await db.get_student_achievements(user_id)

        unlocked = [a for a in achievements if a.unlocked]
        locked = [a for a in achievements if not a.unlocked]

        return {
            "user_id": user_id,
            "total": len(achievements),
            "unlocked_count": len(unlocked),
            "achievements": [
                {
                    "id": a.id,
                    "name": a.name,
                    "description": a.description,
                    "icon": a.icon,
                    "category": a.category,
                    "threshold": a.threshold,
                    "progress": a.progress,
                    "unlocked": a.unlocked,
                }
                for a in achievements
            ]
        }
    except Exception as e:
        logger.error(f"Failed to get achievements: {e}")
        return {"achievements": [], "message": str(e)}


@router.get("/progress/{user_id}")
async def get_progress(
    user_id: str,
    days: int = Query(30, ge=7, le=90, description="Anzahl Tage zurueck"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> dict:
    """
    Returns learning progress over time (for charts).

    - Daily statistics
    - Used by the parent dashboard and progress view
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    db = await get_game_database()
    if not db:
        return {"progress": [], "message": "Database not available"}

    try:
        progress = await db.get_progress_over_time(user_id, days)
        return {
            "user_id": user_id,
            "days": days,
            "data_points": len(progress),
            "progress": progress,
        }
    except Exception as e:
        logger.error(f"Failed to get progress: {e}")
        return {"progress": [], "message": str(e)}


@router.get("/parent/children")
async def get_children_dashboard(
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> dict:
    """
    Parent dashboard: statistics for all children.

    Requires auth with a parent role and a children_ids claim.
    """
    if not REQUIRE_AUTH or user is None:
        return {
            "message": "Auth required for parent dashboard",
            "children": []
        }

    # Get children IDs from token
    children_ids = user.get("raw_claims", {}).get("children_ids", [])

    if not children_ids:
        return {
            "message": "No children associated with this account",
            "children": []
        }

    db = await get_game_database()
    if not db:
        return {"children": [], "message": "Database not available"}

    try:
        children_stats = await db.get_children_stats(children_ids)
        return {
            "parent_id": user.get("user_id"),
            "children_count": len(children_ids),
            "children": children_stats,
        }
    except Exception as e:
        logger.error(f"Failed to get children stats: {e}")
        return {"children": [], "message": str(e)}


@router.get("/leaderboard/class/{class_id}")
async def get_class_leaderboard(
    class_id: str,
    timeframe: str = Query("week", description="day, week, month, all"),
    limit: int = Query(10, ge=1, le=50),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> List[dict]:
    """
    Class-specific leaderboard.

    Visible only to teachers or students of the class.
    """
    db = await get_game_database()
    if not db:
        return []

    try:
        leaderboard = await db.get_class_leaderboard(class_id, timeframe, limit)
        return leaderboard
    except Exception as e:
        logger.error(f"Failed to get class leaderboard: {e}")
        return []


@router.get("/leaderboard/display")
async def get_display_leaderboard(
    timeframe: str = Query("day", description="day, week, month, all"),
    limit: int = Query(10, ge=1, le=100),
    anonymize: bool = Query(True, description="Namen anonymisieren")
) -> List[dict]:
    """
    Public leaderboard with display names.

    Anonymized by default for privacy.
    """
    db = await get_game_database()
    if not db:
        return []

    try:
        return await db.get_leaderboard_with_names(timeframe, limit, anonymize)
    except Exception as e:
        logger.error(f"Failed to get display leaderboard: {e}")
        return []
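For illustration, the achievements route can be exercised with FastAPI's test client; the package import path below is an assumption:

# Sketch: calling the achievements route via TestClient (import path assumed).
from fastapi import FastAPI
from fastapi.testclient import TestClient
from game.extended_routes import router  # assumed package path

app = FastAPI()
app.include_router(router)
client = TestClient(app)

# With GAME_REQUIRE_AUTH=false the route is anonymous; without a database it
# degrades to {"achievements": [], "message": "Database not available"}.
resp = client.get("/api/game/achievements/demo-user")
print(resp.status_code, resp.json())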
@@ -0,0 +1,322 @@
# ==============================================
# Breakpilot Drive - Game API Models & Data
# ==============================================
# Pydantic models, difficulty mappings, and sample questions.
# Extracted from game_api.py for file-size compliance.

from pydantic import BaseModel
from typing import List, Optional, Literal, Dict, Any
from datetime import datetime


# ==============================================
# Pydantic Models
# ==============================================

class LearningLevel(BaseModel):
    """Learning level of a user from the Breakpilot system"""
    user_id: str
    overall_level: int  # 1-5 (1=beginner/grade 2, 5=advanced/grade 6)
    math_level: float
    german_level: float
    english_level: float
    last_updated: datetime


class GameDifficulty(BaseModel):
    """Game difficulty derived from the learning level"""
    lane_speed: float  # speed in m/s
    obstacle_frequency: float  # obstacles per second
    power_up_chance: float  # probability of power-ups (0-1)
    question_complexity: int  # 1-5
    answer_time: int  # seconds to answer
    hints_enabled: bool
    speech_speed: float  # speech rate for the audio version


class QuizQuestion(BaseModel):
    """Quiz question for the game"""
    id: str
    question_text: str
    audio_url: Optional[str] = None
    options: List[str]  # 2-4 answer options
    correct_index: int  # 0-3
    difficulty: int  # 1-5
    subject: Literal["math", "german", "english", "general"]
    grade_level: Optional[int] = None  # 2-6
    # NEW: quiz mode
    quiz_mode: Literal["quick", "pause"] = "quick"  # quick=while driving, pause=game pauses
    visual_trigger: Optional[str] = None  # e.g. "bridge", "house", "tree" - triggers the question
    time_limit_seconds: Optional[float] = None  # time allowed to answer (quick mode)


class QuizAnswer(BaseModel):
    """Answer to a quiz question"""
    question_id: str
    selected_index: int
    answer_time_ms: int  # time until the answer, in ms
    was_correct: bool


class GameSession(BaseModel):
    """Game session data for analytics"""
    user_id: str
    game_mode: Literal["video", "audio"]
    duration_seconds: int
    distance_traveled: float
    score: int
    questions_answered: int
    questions_correct: int
    difficulty_level: int
    quiz_answers: Optional[List[QuizAnswer]] = None


class SessionResponse(BaseModel):
    """Response after a session has been saved"""
    session_id: str
    status: str
    new_level: Optional[int] = None  # set if the learning level was adjusted


# ==============================================
# Difficulty Mapping
# ==============================================

DIFFICULTY_MAPPING = {
    1: GameDifficulty(
        lane_speed=3.0,
        obstacle_frequency=0.3,
        power_up_chance=0.4,
        question_complexity=1,
        answer_time=15,
        hints_enabled=True,
        speech_speed=0.8
    ),
    2: GameDifficulty(
        lane_speed=4.0,
        obstacle_frequency=0.4,
        power_up_chance=0.35,
        question_complexity=2,
        answer_time=12,
        hints_enabled=True,
        speech_speed=0.9
    ),
    3: GameDifficulty(
        lane_speed=5.0,
        obstacle_frequency=0.5,
        power_up_chance=0.3,
        question_complexity=3,
        answer_time=10,
        hints_enabled=True,
        speech_speed=1.0
    ),
    4: GameDifficulty(
        lane_speed=6.0,
        obstacle_frequency=0.6,
        power_up_chance=0.25,
        question_complexity=4,
        answer_time=8,
        hints_enabled=False,
        speech_speed=1.1
    ),
    5: GameDifficulty(
        lane_speed=7.0,
        obstacle_frequency=0.7,
        power_up_chance=0.2,
        question_complexity=5,
        answer_time=6,
        hints_enabled=False,
        speech_speed=1.2
    ),
}


# ==============================================
# Sample quiz questions (load from DB later)
# ==============================================

SAMPLE_QUESTIONS = [
    # ==============================================
    # QUICK QUESTIONS (while driving, visually triggered)
    # ==============================================

    # English vocabulary - objects in the game (QUICK MODE)
    QuizQuestion(
        id="vq-bridge", question_text="What is this?",
        options=["Bridge", "House"], correct_index=0,
        difficulty=1, subject="english", grade_level=3,
        quiz_mode="quick", visual_trigger="bridge", time_limit_seconds=3.0
    ),
    QuizQuestion(
        id="vq-tree", question_text="What is this?",
        options=["Tree", "Flower"], correct_index=0,
        difficulty=1, subject="english", grade_level=3,
        quiz_mode="quick", visual_trigger="tree", time_limit_seconds=3.0
    ),
    QuizQuestion(
        id="vq-house", question_text="What is this?",
        options=["House", "Car"], correct_index=0,
        difficulty=1, subject="english", grade_level=3,
        quiz_mode="quick", visual_trigger="house", time_limit_seconds=3.0
    ),
    QuizQuestion(
        id="vq-car", question_text="What is this?",
        options=["Car", "Bus"], correct_index=0,
        difficulty=1, subject="english", grade_level=3,
        quiz_mode="quick", visual_trigger="car", time_limit_seconds=2.5
    ),
    QuizQuestion(
        id="vq-mountain", question_text="What is this?",
        options=["Hill", "Mountain", "Valley"], correct_index=1,
        difficulty=2, subject="english", grade_level=4,
        quiz_mode="quick", visual_trigger="mountain", time_limit_seconds=3.5
    ),
    QuizQuestion(
        id="vq-river", question_text="What is this?",
        options=["Lake", "River", "Sea"], correct_index=1,
        difficulty=2, subject="english", grade_level=4,
        quiz_mode="quick", visual_trigger="river", time_limit_seconds=3.5
    ),

    # Quick arithmetic (QUICK MODE)
    QuizQuestion(
        id="mq-1", question_text="3 + 4 = ?",
        options=["6", "7"], correct_index=1,
        difficulty=1, subject="math", grade_level=2,
        quiz_mode="quick", time_limit_seconds=4.0
    ),
    QuizQuestion(
        id="mq-2", question_text="5 x 2 = ?",
        options=["10", "12"], correct_index=0,
        difficulty=1, subject="math", grade_level=2,
        quiz_mode="quick", time_limit_seconds=4.0
    ),
    QuizQuestion(
        id="mq-3", question_text="8 - 3 = ?",
        options=["4", "5"], correct_index=1,
        difficulty=1, subject="math", grade_level=2,
        quiz_mode="quick", time_limit_seconds=3.5
    ),
    QuizQuestion(
        id="mq-4", question_text="6 x 7 = ?",
        options=["42", "48"], correct_index=0,
        difficulty=2, subject="math", grade_level=3,
        quiz_mode="quick", time_limit_seconds=5.0
    ),
    QuizQuestion(
        id="mq-5", question_text="9 x 8 = ?",
        options=["72", "64"], correct_index=0,
        difficulty=3, subject="math", grade_level=4,
        quiz_mode="quick", time_limit_seconds=5.0
    ),

    # ==============================================
    # PAUSE QUESTIONS (game pauses, more time)
    # ==============================================

    # Math level 1-2 (grades 2-3) - PAUSE MODE
    QuizQuestion(
        id="mp1-1", question_text="Anna hat 5 Aepfel. Sie bekommt 3 dazu. Wie viele hat sie jetzt?",
        options=["6", "7", "8", "9"], correct_index=2,
        difficulty=1, subject="math", grade_level=2,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp2-1", question_text="Ein Bus hat 24 Sitze. 18 sind besetzt. Wie viele sind frei?",
        options=["4", "5", "6", "7"], correct_index=2,
        difficulty=2, subject="math", grade_level=3,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp2-2", question_text="Was ist 45 + 27?",
        options=["72", "62", "82", "70"], correct_index=0,
        difficulty=2, subject="math", grade_level=3,
        quiz_mode="pause"
    ),

    # Math level 3-4 (grades 4-5) - PAUSE MODE
    QuizQuestion(
        id="mp3-1", question_text="Was ist 7 x 8?",
        options=["54", "56", "58", "48"], correct_index=1,
        difficulty=3, subject="math", grade_level=4,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp3-2", question_text="Ein Rechteck ist 8m lang und 5m breit. Wie gross ist die Flaeche?",
        options=["35 m2", "40 m2", "45 m2", "26 m2"], correct_index=1,
        difficulty=3, subject="math", grade_level=4,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp4-1", question_text="Was ist 15% von 80?",
        options=["10", "12", "8", "15"], correct_index=1,
        difficulty=4, subject="math", grade_level=5,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp4-2", question_text="Was ist 3/4 + 1/2?",
        options=["5/4", "4/6", "1", "5/6"], correct_index=0,
        difficulty=4, subject="math", grade_level=5,
        quiz_mode="pause"
    ),

    # Math level 5 (grade 6) - PAUSE MODE
    QuizQuestion(
        id="mp5-1", question_text="Was ist (-5) x (-3)?",
        options=["-15", "15", "-8", "8"], correct_index=1,
        difficulty=5, subject="math", grade_level=6,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="mp5-2", question_text="Loesung von 2x + 5 = 11?",
        options=["2", "3", "4", "6"], correct_index=1,
        difficulty=5, subject="math", grade_level=6,
        quiz_mode="pause"
    ),

    # German - PAUSE MODE (need reading time)
    QuizQuestion(
        id="dp1-1", question_text="Welches Wort ist ein Nomen?",
        options=["laufen", "schnell", "Hund", "und"], correct_index=2,
        difficulty=1, subject="german", grade_level=2,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="dp2-1", question_text="Was ist die Mehrzahl von 'Haus'?",
        options=["Haeuse", "Haeuser", "Hausern", "Haus"], correct_index=1,
        difficulty=2, subject="german", grade_level=3,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="dp3-1", question_text="Welches Verb steht im Praeteritum?",
        options=["geht", "ging", "gegangen", "gehen"], correct_index=1,
        difficulty=3, subject="german", grade_level=4,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="dp3-2", question_text="Finde den Rechtschreibfehler: 'Der Hund leuft schnell.'",
        options=["Hund", "leuft", "schnell", "Der"], correct_index=1,
        difficulty=3, subject="german", grade_level=4,
        quiz_mode="pause"
    ),

    # English sentences - PAUSE MODE
    QuizQuestion(
        id="ep3-1", question_text="How do you say 'Schmetterling'?",
        options=["bird", "bee", "butterfly", "beetle"], correct_index=2,
        difficulty=3, subject="english", grade_level=4,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="ep4-1", question_text="Choose the correct form: She ___ to school.",
        options=["go", "goes", "going", "gone"], correct_index=1,
        difficulty=4, subject="english", grade_level=5,
        quiz_mode="pause"
    ),
    QuizQuestion(
        id="ep4-2", question_text="What is the past tense of 'run'?",
        options=["runned", "ran", "runed", "running"], correct_index=1,
        difficulty=4, subject="english", grade_level=5,
        quiz_mode="pause"
    ),
]
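As a worked example of how this data is consumed: the core quiz route filters SAMPLE_QUESTIONS with a +/- 1 difficulty tolerance, so a level-3 player draws from questions tagged 2, 3, or 4. A small sketch, assuming the module is importable as game_models:

# Sketch: reading the mapping and question pool (module path assumed).
from game_models import DIFFICULTY_MAPPING, SAMPLE_QUESTIONS

params = DIFFICULTY_MAPPING[3]
print(params.answer_time, params.hints_enabled)  # -> 10 True

pool = [q.id for q in SAMPLE_QUESTIONS if abs(q.difficulty - 3) <= 1]
print(len(pool))  # questions tagged difficulty 2-4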
@@ -0,0 +1,296 @@
# ==============================================
# Breakpilot Drive - Game API Core Routes
# ==============================================
# Core game endpoints: learning level, difficulty, quiz questions.
# Session/stats/leaderboard routes are in game_session_routes.py.
# Extracted from game_api.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
from datetime import datetime
import random
import uuid
import os
import logging

from .game_models import (
    LearningLevel,
    GameDifficulty,
    QuizQuestion,
    QuizAnswer,
    GameSession,
    SessionResponse,
    DIFFICULTY_MAPPING,
    SAMPLE_QUESTIONS,
)

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
REQUIRE_AUTH = os.getenv("GAME_REQUIRE_AUTH", "false").lower() == "true"

router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])


# ==============================================
# Auth Dependency (Optional)
# ==============================================

async def get_optional_current_user(request: Request) -> Optional[Dict[str, Any]]:
    """
    Optional auth dependency for Game API.

    If GAME_REQUIRE_AUTH=true: Requires valid JWT token
    If GAME_REQUIRE_AUTH=false: Returns None (anonymous access)

    In development mode without auth, anonymous access is allowed
    (None is returned).
    """
    if not REQUIRE_AUTH:
        return None

    try:
        from auth import get_current_user
        return await get_current_user(request)
    except ImportError:
        logger.warning("Auth module not available")
        return None
    except HTTPException:
        raise  # Re-raise auth errors
    except Exception as e:
        logger.error(f"Auth error: {e}")
        raise HTTPException(status_code=401, detail="Authentication failed")


def get_user_id_from_auth(
    user: Optional[Dict[str, Any]],
    requested_user_id: str
) -> str:
    """
    Get the effective user ID, respecting auth when enabled.

    If auth is enabled and user is authenticated:
    - Returns user's own ID if requested_user_id matches
    - For parents: allows access to child IDs from token
    - For teachers: allows access to student IDs (future)

    If auth is disabled: Returns requested_user_id as-is
    """
    if not REQUIRE_AUTH or user is None:
        return requested_user_id

    user_id = user.get("user_id", "")

    # Same user - always allowed
    if requested_user_id == user_id:
        return user_id

    # Check for parent accessing child data
    children_ids = user.get("raw_claims", {}).get("children_ids", [])
    if requested_user_id in children_ids:
        return requested_user_id

    # Check for teacher accessing student data (future)
    realm_roles = user.get("realm_roles", [])
    if "lehrer" in realm_roles or "teacher" in realm_roles:
        # Teachers can access any student in their class (implement class check later)
        return requested_user_id

    # Admin bypass
    if "admin" in realm_roles:
        return requested_user_id

    # Not authorized
    raise HTTPException(
        status_code=403,
        detail="Not authorized to access this user's data"
    )


# In-memory session storage (fallback when the DB is unavailable)
_sessions: dict[str, GameSession] = {}
_user_levels: dict[str, LearningLevel] = {}

# Database integration
_game_db = None

async def get_game_database():
    """Get game database instance with lazy initialization."""
    global _game_db
    if not USE_DATABASE:
        return None
    if _game_db is None:
        try:
            from game.database import get_game_db
            _game_db = await get_game_db()
            logger.info("Game database initialized")
        except Exception as e:
            logger.warning(f"Game database not available, using in-memory: {e}")
    return _game_db


# ==============================================
# API Endpoints
# ==============================================

@router.get("/learning-level/{user_id}", response_model=LearningLevel)
async def get_learning_level(
    user_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> LearningLevel:
    """
    Fetches the current learning level of a user from Breakpilot.

    - Called at game start to adapt the difficulty
    - Returns level 1-5 (1=beginner, 5=advanced)
    - Caches values for fast access
    - Persists to PostgreSQL when available
    - With GAME_REQUIRE_AUTH=true: own or child data only
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    # Try database first
    db = await get_game_database()
    if db:
        state = await db.get_learning_state(user_id)
        if state:
            return LearningLevel(
                user_id=user_id,
                overall_level=state.overall_level,
                math_level=state.math_level,
                german_level=state.german_level,
                english_level=state.english_level,
                last_updated=state.updated_at or datetime.now()
            )

        # Create new state in database
        new_state = await db.create_or_update_learning_state(
            student_id=user_id,
            overall_level=3,
            math_level=3.0,
            german_level=3.0,
            english_level=3.0
        )
        if new_state:
            return LearningLevel(
                user_id=user_id,
                overall_level=new_state.overall_level,
                math_level=new_state.math_level,
                german_level=new_state.german_level,
                english_level=new_state.english_level,
                last_updated=new_state.updated_at or datetime.now()
            )

    # Fallback to in-memory
    if user_id in _user_levels:
        return _user_levels[user_id]

    # Default level for new users
    default_level = LearningLevel(
        user_id=user_id,
        overall_level=3,  # medium level as default
        math_level=3.0,
        german_level=3.0,
        english_level=3.0,
        last_updated=datetime.now()
    )
    _user_levels[user_id] = default_level
    return default_level


@router.get("/difficulty/{level}", response_model=GameDifficulty)
async def get_game_difficulty(level: int) -> GameDifficulty:
    """
    Returns game parameters based on the learning level.

    Levels 1-5 are mapped to game speed, obstacle frequency,
    question difficulty, etc.
    """
    if level < 1 or level > 5:
        raise HTTPException(status_code=400, detail="Level muss zwischen 1 und 5 sein")

    return DIFFICULTY_MAPPING[level]


@router.get("/quiz/questions", response_model=List[QuizQuestion])
async def get_quiz_questions(
    difficulty: int = Query(3, ge=1, le=5, description="Schwierigkeitsgrad 1-5"),
    count: int = Query(10, ge=1, le=50, description="Anzahl der Fragen"),
    subject: Optional[str] = Query(None, description="Fach: math, german, english, oder None fuer gemischt"),
    mode: Optional[str] = Query(None, description="Quiz-Modus: quick (waehrend Fahrt), pause (Spiel pausiert), oder None fuer beide")
) -> List[QuizQuestion]:
    """
    Fetches quiz questions for the game.

    - Filters by difficulty (+/- 1 level)
    - Optionally filtered by subject
    - Optionally filtered by mode: "quick" (visual questions while driving) or "pause" (thinking tasks)
    - Returns a random selection
    """
    # Filter questions by difficulty (+/- 1 level tolerance)
    filtered = [
        q for q in SAMPLE_QUESTIONS
        if abs(q.difficulty - difficulty) <= 1
        and (subject is None or q.subject == subject)
        and (mode is None or q.quiz_mode == mode)
    ]

    if not filtered:
        # Fallback: all questions if none match
        filtered = [q for q in SAMPLE_QUESTIONS if mode is None or q.quiz_mode == mode]

    # Random selection
    selected = random.sample(filtered, min(count, len(filtered)))
    return selected


@router.get("/quiz/visual-triggers")
async def get_visual_triggers() -> List[dict]:
    """
    Returns all available visual triggers.

    Unity uses this list to know which in-game objects
    can trigger quiz questions.
    """
    triggers = {}
    for q in SAMPLE_QUESTIONS:
        if q.visual_trigger and q.quiz_mode == "quick":
            if q.visual_trigger not in triggers:
                triggers[q.visual_trigger] = {
                    "trigger": q.visual_trigger,
                    "question_count": 0,
                    "difficulties": set(),
                    "subjects": set()
                }
            triggers[q.visual_trigger]["question_count"] += 1
            triggers[q.visual_trigger]["difficulties"].add(q.difficulty)
            triggers[q.visual_trigger]["subjects"].add(q.subject)

    # Convert sets to lists for JSON
    return [
        {
            "trigger": t["trigger"],
            "question_count": t["question_count"],
            "difficulties": list(t["difficulties"]),
            "subjects": list(t["subjects"])
        }
        for t in triggers.values()
    ]


@router.post("/quiz/answer")
async def submit_quiz_answer(answer: QuizAnswer) -> dict:
    """
    Processes a quiz answer (for real-time feedback).

    In the final version: stores it in the session and updates analytics.
    """
    return {
        "question_id": answer.question_id,
        "was_correct": answer.was_correct,
        "points": 500 if answer.was_correct else -100,
        "message": "Richtig! Weiter so!" if answer.was_correct else "Nicht ganz, versuch es nochmal!"
    }
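A small sketch of the access rules get_user_id_from_auth enforces, using a hypothetical parent token payload; the checks only bite when GAME_REQUIRE_AUTH=true, since with auth disabled the function passes IDs through unchanged:

# Sketch: access checks in get_user_id_from_auth (payload is hypothetical).
parent = {
    "user_id": "parent-1",
    "raw_claims": {"children_ids": ["child-7"]},
    "realm_roles": [],
}

assert get_user_id_from_auth(parent, "parent-1") == "parent-1"  # own data
assert get_user_id_from_auth(parent, "child-7") == "child-7"    # listed child
# get_user_id_from_auth(parent, "stranger")  # raises HTTPException(403) when auth is on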
@@ -0,0 +1,395 @@
# ==============================================
# Breakpilot Drive - Game Session & Stats Routes
# ==============================================
# Session saving, leaderboard, stats, suggestions,
# quiz generation, and health check.
# Extracted from game_routes.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
from datetime import datetime
import uuid
import logging

from .game_models import (
    LearningLevel,
    QuizQuestion,
    GameSession,
    SessionResponse,
    SAMPLE_QUESTIONS,
)

logger = logging.getLogger(__name__)

# Import shared state and helpers from game_routes
# (these are the canonical instances)
from .routes import (
    get_optional_current_user,
    get_user_id_from_auth,
    get_game_database,
    get_quiz_questions,
    _sessions,
    _user_levels,
    REQUIRE_AUTH,
)

router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])


@router.post("/session", response_model=SessionResponse)
async def save_game_session(
    session: GameSession,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> SessionResponse:
    """
    Saves a complete game session.

    - Logs score, distance, and question performance
    - Updates the learning level once enough data is available
    - Called at the end of each game
    - Persists to PostgreSQL when available
    - With GAME_REQUIRE_AUTH=true: user ID is taken from the token
    """
    # If auth is enabled, use user_id from token (ignore session.user_id)
    effective_user_id = session.user_id
    if REQUIRE_AUTH and user:
        effective_user_id = user.get("user_id", session.user_id)

    session_id = str(uuid.uuid4())

    # Learning-level adjustment based on performance
    new_level = None
    old_level = 3  # Default

    # Try to get current level first
    db = await get_game_database()
    if db:
        state = await db.get_learning_state(effective_user_id)
        if state:
            old_level = state.overall_level
        else:
            # Create initial state if not exists
            await db.create_or_update_learning_state(effective_user_id)
            old_level = 3
    elif effective_user_id in _user_levels:
        old_level = _user_levels[effective_user_id].overall_level

    # Calculate level adjustment
    if session.questions_answered >= 5:
        accuracy = session.questions_correct / session.questions_answered

        # Adjustment: >=80% correct and max not yet reached -> level up
        if accuracy >= 0.8 and old_level < 5:
            new_level = old_level + 1
        # <40% correct and min not yet reached -> level down
        elif accuracy < 0.4 and old_level > 1:
            new_level = old_level - 1

    # Save to database
    if db:
        # Save session
        db_session_id = await db.save_game_session(
            student_id=effective_user_id,
            game_mode=session.game_mode,
            duration_seconds=session.duration_seconds,
            distance_traveled=session.distance_traveled,
            score=session.score,
            questions_answered=session.questions_answered,
            questions_correct=session.questions_correct,
            difficulty_level=session.difficulty_level,
        )
        if db_session_id:
            session_id = db_session_id

        # Save individual quiz answers if provided
        if session.quiz_answers:
            for answer in session.quiz_answers:
                await db.save_quiz_answer(
                    session_id=session_id,
                    question_id=answer.question_id,
                    subject="general",  # Could be enhanced to track actual subject
                    difficulty=session.difficulty_level,
                    is_correct=answer.was_correct,
                    answer_time_ms=answer.answer_time_ms,
                )

        # Update learning stats
        duration_minutes = session.duration_seconds // 60
        await db.update_learning_stats(
            student_id=effective_user_id,
            duration_minutes=duration_minutes,
            questions_answered=session.questions_answered,
            questions_correct=session.questions_correct,
            new_level=new_level,
        )
    else:
        # Fallback to in-memory
        _sessions[session_id] = session

        if new_level:
            _user_levels[effective_user_id] = LearningLevel(
                user_id=effective_user_id,
                overall_level=new_level,
                math_level=float(new_level),
                german_level=float(new_level),
                english_level=float(new_level),
                last_updated=datetime.now()
            )

    return SessionResponse(
        session_id=session_id,
        status="saved",
        new_level=new_level
    )


@router.get("/sessions/{user_id}")
async def get_user_sessions(
    user_id: str,
    limit: int = Query(10, ge=1, le=100),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> List[dict]:
    """
    Fetches a user's most recent game sessions.

    Used for statistics and progress views.
    With GAME_REQUIRE_AUTH=true: own or child data only.
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    # Try database first
    db = await get_game_database()
    if db:
        sessions = await db.get_user_sessions(user_id, limit)
        if sessions:
            return sessions

    # Fallback to in-memory
    user_sessions = [
        {"session_id": sid, **s.model_dump()}
        for sid, s in _sessions.items()
        if s.user_id == user_id
    ]
    return user_sessions[:limit]


@router.get("/leaderboard")
async def get_leaderboard(
    timeframe: str = Query("day", description="day, week, month, all"),
    limit: int = Query(10, ge=1, le=100)
) -> List[dict]:
    """
    Returns the high-score list.

    - Sorted by score
    - Optionally filtered by timeframe
    """
    # Try database first
    db = await get_game_database()
    if db:
        leaderboard = await db.get_leaderboard(timeframe, limit)
        if leaderboard:
            return leaderboard

    # Fallback to in-memory:
    # aggregate scores per user
    user_scores: dict[str, int] = {}
    for session in _sessions.values():
        if session.user_id not in user_scores:
            user_scores[session.user_id] = 0
        user_scores[session.user_id] += session.score

    # Sort and limit
    leaderboard = [
        {"rank": i + 1, "user_id": uid, "total_score": score}
        for i, (uid, score) in enumerate(
            sorted(user_scores.items(), key=lambda x: x[1], reverse=True)[:limit]
        )
    ]

    return leaderboard


@router.get("/stats/{user_id}")
async def get_user_stats(
    user_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> dict:
    """
    Returns detailed statistics for a user.

    - Overall statistics
    - Per-subject statistics
    - Learning-level history
    - With GAME_REQUIRE_AUTH=true: own or child data only
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    db = await get_game_database()
    if db:
        state = await db.get_learning_state(user_id)
|
||||||
|
subject_stats = await db.get_subject_stats(user_id)
|
||||||
|
|
||||||
|
if state:
|
||||||
|
return {
|
||||||
|
"user_id": user_id,
|
||||||
|
"overall_level": state.overall_level,
|
||||||
|
"math_level": state.math_level,
|
||||||
|
"german_level": state.german_level,
|
||||||
|
"english_level": state.english_level,
|
||||||
|
"total_play_time_minutes": state.total_play_time_minutes,
|
||||||
|
"total_sessions": state.total_sessions,
|
||||||
|
"questions_answered": state.questions_answered,
|
||||||
|
"questions_correct": state.questions_correct,
|
||||||
|
"accuracy": state.accuracy,
|
||||||
|
"subjects": subject_stats,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Fallback - return defaults
|
||||||
|
return {
|
||||||
|
"user_id": user_id,
|
||||||
|
"overall_level": 3,
|
||||||
|
"math_level": 3.0,
|
||||||
|
"german_level": 3.0,
|
||||||
|
"english_level": 3.0,
|
||||||
|
"total_play_time_minutes": 0,
|
||||||
|
"total_sessions": 0,
|
||||||
|
"questions_answered": 0,
|
||||||
|
"questions_correct": 0,
|
||||||
|
"accuracy": 0.0,
|
||||||
|
"subjects": {},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/suggestions/{user_id}")
|
||||||
|
async def get_learning_suggestions(
|
||||||
|
user_id: str,
|
||||||
|
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
||||||
|
) -> dict:
|
||||||
|
"""
|
||||||
|
Gibt adaptive Lernvorschlaege fuer einen Benutzer zurueck.
|
||||||
|
|
||||||
|
Basierend auf aktueller Performance und Lernhistorie.
|
||||||
|
Bei GAME_REQUIRE_AUTH=true: Nur eigene oder Kind-Daten.
|
||||||
|
"""
|
||||||
|
# Verify access rights
|
||||||
|
user_id = get_user_id_from_auth(user, user_id)
|
||||||
|
|
||||||
|
db = await get_game_database()
|
||||||
|
if not db:
|
||||||
|
return {"suggestions": [], "message": "Database not available"}
|
||||||
|
|
||||||
|
state = await db.get_learning_state(user_id)
|
||||||
|
if not state:
|
||||||
|
return {"suggestions": [], "message": "No learning state found"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
from game.learning_rules import (
|
||||||
|
LearningContext,
|
||||||
|
get_rule_engine,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create context from state
|
||||||
|
context = LearningContext.from_learning_state(state)
|
||||||
|
|
||||||
|
# Get suggestions from rule engine
|
||||||
|
engine = get_rule_engine()
|
||||||
|
suggestions = engine.evaluate(context)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"user_id": user_id,
|
||||||
|
"overall_level": state.overall_level,
|
||||||
|
"suggestions": [
|
||||||
|
{
|
||||||
|
"title": s.title,
|
||||||
|
"description": s.description,
|
||||||
|
"action": s.action.value,
|
||||||
|
"priority": s.priority.name.lower(),
|
||||||
|
"metadata": s.metadata or {},
|
||||||
|
}
|
||||||
|
for s in suggestions[:3] # Top 3 suggestions
|
||||||
|
]
|
||||||
|
}
|
||||||
|
except ImportError:
|
||||||
|
return {"suggestions": [], "message": "Learning rules not available"}
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to get suggestions: {e}")
|
||||||
|
return {"suggestions": [], "message": str(e)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/quiz/generate")
|
||||||
|
async def generate_quiz_questions(
|
||||||
|
difficulty: int = Query(3, ge=1, le=5, description="Schwierigkeitsgrad 1-5"),
|
||||||
|
count: int = Query(5, ge=1, le=20, description="Anzahl der Fragen"),
|
||||||
|
subject: Optional[str] = Query(None, description="Fach: math, german, english"),
|
||||||
|
mode: str = Query("quick", description="Quiz-Modus: quick oder pause"),
|
||||||
|
visual_trigger: Optional[str] = Query(None, description="Visueller Trigger: bridge, tree, house, etc.")
|
||||||
|
) -> List[dict]:
|
||||||
|
"""
|
||||||
|
Generiert Quiz-Fragen dynamisch via LLM.
|
||||||
|
|
||||||
|
Fallback auf statische Fragen wenn LLM nicht verfuegbar.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from game.quiz_generator import get_quiz_generator
|
||||||
|
|
||||||
|
generator = await get_quiz_generator()
|
||||||
|
questions = await generator.get_questions(
|
||||||
|
difficulty=difficulty,
|
||||||
|
subject=subject or "general",
|
||||||
|
mode=mode,
|
||||||
|
count=count,
|
||||||
|
visual_trigger=visual_trigger
|
||||||
|
)
|
||||||
|
|
||||||
|
if questions:
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"id": f"gen-{i}",
|
||||||
|
"question_text": q.question_text,
|
||||||
|
"options": q.options,
|
||||||
|
"correct_index": q.correct_index,
|
||||||
|
"difficulty": q.difficulty,
|
||||||
|
"subject": q.subject,
|
||||||
|
"grade_level": q.grade_level,
|
||||||
|
"quiz_mode": q.quiz_mode,
|
||||||
|
"visual_trigger": q.visual_trigger,
|
||||||
|
"time_limit_seconds": q.time_limit_seconds,
|
||||||
|
}
|
||||||
|
for i, q in enumerate(questions)
|
||||||
|
]
|
||||||
|
except ImportError:
|
||||||
|
logger.info("Quiz generator not available, using static questions")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Quiz generation failed: {e}")
|
||||||
|
|
||||||
|
# Fallback to static questions
|
||||||
|
return await get_quiz_questions(difficulty, count, subject, mode)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/health")
|
||||||
|
async def health_check() -> dict:
|
||||||
|
"""Health-Check fuer das Spiel-Backend."""
|
||||||
|
db = await get_game_database()
|
||||||
|
db_status = "connected" if db and db._connected else "disconnected"
|
||||||
|
|
||||||
|
# Check LLM availability
|
||||||
|
llm_status = "disabled"
|
||||||
|
try:
|
||||||
|
from game.quiz_generator import get_quiz_generator
|
||||||
|
generator = await get_quiz_generator()
|
||||||
|
llm_status = "connected" if generator._llm_available else "disconnected"
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "healthy",
|
||||||
|
"service": "breakpilot-drive",
|
||||||
|
"database": db_status,
|
||||||
|
"llm_generator": llm_status,
|
||||||
|
"auth_required": REQUIRE_AUTH,
|
||||||
|
"questions_available": len(SAMPLE_QUESTIONS),
|
||||||
|
"active_sessions": len(_sessions)
|
||||||
|
}
|
||||||
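
The adjustment rule in save_game_session only fires once at least five questions were answered in a session, and it moves the level by at most one step per session. A minimal standalone sketch of that rule; the helper name adjust_level is illustrative, not part of the codebase:

from typing import Optional

def adjust_level(old_level: int, answered: int, correct: int) -> Optional[int]:
    """Return the new level, or None if the level stays unchanged.

    Mirrors the thresholds above: >=80% accuracy moves up (capped at 5),
    <40% moves down (floored at 1), and fewer than five answered
    questions never triggers a change.
    """
    if answered < 5:
        return None
    accuracy = correct / answered
    if accuracy >= 0.8 and old_level < 5:
        return old_level + 1
    if accuracy < 0.4 and old_level > 1:
        return old_level - 1
    return None

assert adjust_level(3, 10, 9) == 4      # 90% -> level up
assert adjust_level(3, 10, 3) == 2      # 30% -> level down
assert adjust_level(5, 10, 10) is None  # already at max
assert adjust_level(3, 4, 0) is None    # too few questions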
@@ -1,46 +1,4 @@
-# ==============================================
-# Breakpilot Drive - Game API (barrel re-export)
-# ==============================================
-# This module was split into:
-# - game_models.py (Pydantic models, difficulty mapping, sample questions)
-# - game_routes.py (Core game routes: level, quiz, session, leaderboard)
-# - game_extended_routes.py (Phase 5: achievements, progress, parent, class)
-#
-# The `router` object is assembled here by including all sub-routers.
-# Importers that did `from game_api import router` continue to work.
-
-from fastapi import APIRouter
-
-from game_routes import router as _core_router
-from game_session_routes import router as _session_router
-from game_extended_routes import router as _extended_router
-
-# Re-export models for any direct importers
-from game_models import (  # noqa: F401
-    LearningLevel,
-    GameDifficulty,
-    QuizQuestion,
-    QuizAnswer,
-    GameSession,
-    SessionResponse,
-    DIFFICULTY_MAPPING,
-    SAMPLE_QUESTIONS,
-)
-
-# Re-export helpers/state for any direct importers
-from game_routes import (  # noqa: F401
-    get_optional_current_user,
-    get_user_id_from_auth,
-    get_game_database,
-    _sessions,
-    _user_levels,
-    USE_DATABASE,
-    REQUIRE_AUTH,
-)
-
-# Assemble the combined router.
-# Both sub-routers use prefix="/api/game", so include without extra prefix.
-router = APIRouter()
-router.include_router(_core_router)
-router.include_router(_session_router)
-router.include_router(_extended_router)
+# Backward-compat shim -- module moved to game/api.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("game.api")
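
Every legacy module in this commit is reduced to the same four-line shim. Assigning into sys.modules[__name__] at import time makes any later `import game_api` (or `from game_api import router`) resolve to the relocated module object itself, so old import paths keep working without maintaining a re-export list. A minimal sketch of the mechanism with made-up module names (old_mod/new_mod are illustrative, not from the repo):

# new_mod.py
VALUE = 42

# old_mod.py -- the shim pattern used in this commit
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("new_mod")

# client code: old imports keep working after the move, because the
# import system re-reads sys.modules after executing old_mod.py
import old_mod
assert old_mod.VALUE == 42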
@@ -1,189 +1,4 @@
-# ==============================================
-# Breakpilot Drive - Game Extended Routes
-# ==============================================
-# Phase 5 features: achievements, progress, parent dashboard,
-# class leaderboard, and display leaderboard.
-# Extracted from game_api.py for file-size compliance.
-
-from fastapi import APIRouter, HTTPException, Query, Depends, Request
-from typing import List, Optional, Dict, Any
-import logging
-
-from game_routes import (
-    get_optional_current_user,
-    get_user_id_from_auth,
-    get_game_database,
-    REQUIRE_AUTH,
-)
-
-logger = logging.getLogger(__name__)
-
-router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])
-
-
-# ==============================================
-# Phase 5: extended features
-# ==============================================
-
-@router.get("/achievements/{user_id}")
-async def get_achievements(
-    user_id: str,
-    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
-) -> dict:
-    """
-    Returns achievements with progress for a user.
-
-    Achievements are computed from game statistics.
-    """
-    # Verify access rights
-    user_id = get_user_id_from_auth(user, user_id)
-
-    db = await get_game_database()
-    if not db:
-        return {"achievements": [], "message": "Database not available"}
-
-    try:
-        achievements = await db.get_student_achievements(user_id)
-
-        unlocked = [a for a in achievements if a.unlocked]
-        locked = [a for a in achievements if not a.unlocked]
-
-        return {
-            "user_id": user_id,
-            "total": len(achievements),
-            "unlocked_count": len(unlocked),
-            "achievements": [
-                {
-                    "id": a.id,
-                    "name": a.name,
-                    "description": a.description,
-                    "icon": a.icon,
-                    "category": a.category,
-                    "threshold": a.threshold,
-                    "progress": a.progress,
-                    "unlocked": a.unlocked,
-                }
-                for a in achievements
-            ]
-        }
-    except Exception as e:
-        logger.error(f"Failed to get achievements: {e}")
-        return {"achievements": [], "message": str(e)}
-
-
-@router.get("/progress/{user_id}")
-async def get_progress(
-    user_id: str,
-    days: int = Query(30, ge=7, le=90, description="Anzahl Tage zurueck"),
-    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
-) -> dict:
-    """
-    Returns learning progress over time (for charts).
-
-    - Daily statistics
-    - For the parent dashboard and progress display
-    """
-    # Verify access rights
-    user_id = get_user_id_from_auth(user, user_id)
-
-    db = await get_game_database()
-    if not db:
-        return {"progress": [], "message": "Database not available"}
-
-    try:
-        progress = await db.get_progress_over_time(user_id, days)
-        return {
-            "user_id": user_id,
-            "days": days,
-            "data_points": len(progress),
-            "progress": progress,
-        }
-    except Exception as e:
-        logger.error(f"Failed to get progress: {e}")
-        return {"progress": [], "message": str(e)}
-
-
-@router.get("/parent/children")
-async def get_children_dashboard(
-    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
-) -> dict:
-    """
-    Parent dashboard: statistics for all children.
-
-    Requires auth with the parent role and a children_ids claim.
-    """
-    if not REQUIRE_AUTH or user is None:
-        return {
-            "message": "Auth required for parent dashboard",
-            "children": []
-        }
-
-    # Get children IDs from token
-    children_ids = user.get("raw_claims", {}).get("children_ids", [])
-
-    if not children_ids:
-        return {
-            "message": "No children associated with this account",
-            "children": []
-        }
-
-    db = await get_game_database()
-    if not db:
-        return {"children": [], "message": "Database not available"}
-
-    try:
-        children_stats = await db.get_children_stats(children_ids)
-        return {
-            "parent_id": user.get("user_id"),
-            "children_count": len(children_ids),
-            "children": children_stats,
-        }
-    except Exception as e:
-        logger.error(f"Failed to get children stats: {e}")
-        return {"children": [], "message": str(e)}
-
-
-@router.get("/leaderboard/class/{class_id}")
-async def get_class_leaderboard(
-    class_id: str,
-    timeframe: str = Query("week", description="day, week, month, all"),
-    limit: int = Query(10, ge=1, le=50),
-    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
-) -> List[dict]:
-    """
-    Class-specific leaderboard.
-
-    Visible only to teachers or students of the class.
-    """
-    db = await get_game_database()
-    if not db:
-        return []
-
-    try:
-        leaderboard = await db.get_class_leaderboard(class_id, timeframe, limit)
-        return leaderboard
-    except Exception as e:
-        logger.error(f"Failed to get class leaderboard: {e}")
-        return []
-
-
-@router.get("/leaderboard/display")
-async def get_display_leaderboard(
-    timeframe: str = Query("day", description="day, week, month, all"),
-    limit: int = Query(10, ge=1, le=100),
-    anonymize: bool = Query(True, description="Namen anonymisieren")
-) -> List[dict]:
-    """
-    Public leaderboard with display names.
-
-    Anonymized by default for privacy.
-    """
-    db = await get_game_database()
-    if not db:
-        return []
-
-    try:
-        return await db.get_leaderboard_with_names(timeframe, limit, anonymize)
-    except Exception as e:
-        logger.error(f"Failed to get display leaderboard: {e}")
-        return []
+# Backward-compat shim -- module moved to game/extended_routes.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("game.extended_routes")
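
The parent dashboard above takes the children's IDs directly from the decoded token (raw_claims.children_ids); no database lookup is involved in that authorization step. A sketch of the user dict shape the route appears to expect -- the exact claim layout is defined by the auth module and is an assumption here:

# Hypothetical decoded-token shape consumed by get_children_dashboard
# (only user_id and raw_claims.children_ids are actually read).
user = {
    "user_id": "parent-123",
    "realm_roles": ["eltern"],
    "raw_claims": {
        "children_ids": ["child-456", "child-789"],
    },
}

children_ids = user.get("raw_claims", {}).get("children_ids", [])
assert children_ids == ["child-456", "child-789"]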
@@ -1,322 +1,4 @@
-# ==============================================
-# Breakpilot Drive - Game API Models & Data
-# ==============================================
-# Pydantic models, difficulty mappings, and sample questions.
-# Extracted from game_api.py for file-size compliance.
-
-from pydantic import BaseModel
-from typing import List, Optional, Literal, Dict, Any
-from datetime import datetime
-
-
-# ==============================================
-# Pydantic Models
-# ==============================================
-
-class LearningLevel(BaseModel):
-    """A user's learning level from the Breakpilot system"""
-    user_id: str
-    overall_level: int  # 1-5 (1=beginner/grade 2, 5=advanced/grade 6)
-    math_level: float
-    german_level: float
-    english_level: float
-    last_updated: datetime
-
-
-class GameDifficulty(BaseModel):
-    """Game difficulty derived from the learning level"""
-    lane_speed: float  # speed in m/s
-    obstacle_frequency: float  # obstacles per second
-    power_up_chance: float  # probability of power-ups (0-1)
-    question_complexity: int  # 1-5
-    answer_time: int  # seconds to answer
-    hints_enabled: bool
-    speech_speed: float  # speech rate for the audio version
-
-
-class QuizQuestion(BaseModel):
-    """Quiz question for the game"""
-    id: str
-    question_text: str
-    audio_url: Optional[str] = None
-    options: List[str]  # 2-4 answer options
-    correct_index: int  # 0-3
-    difficulty: int  # 1-5
-    subject: Literal["math", "german", "english", "general"]
-    grade_level: Optional[int] = None  # 2-6
-    # NEW: quiz mode
-    quiz_mode: Literal["quick", "pause"] = "quick"  # quick=while driving, pause=game pauses
-    visual_trigger: Optional[str] = None  # e.g. "bridge", "house", "tree" - triggers the question
-    time_limit_seconds: Optional[float] = None  # time allowed to answer (quick mode)
-
-
-class QuizAnswer(BaseModel):
-    """Answer to a quiz question"""
-    question_id: str
-    selected_index: int
-    answer_time_ms: int  # time to answer in ms
-    was_correct: bool
-
-
-class GameSession(BaseModel):
-    """Game-session data for analytics"""
-    user_id: str
-    game_mode: Literal["video", "audio"]
-    duration_seconds: int
-    distance_traveled: float
-    score: int
-    questions_answered: int
-    questions_correct: int
-    difficulty_level: int
-    quiz_answers: Optional[List[QuizAnswer]] = None
-
-
-class SessionResponse(BaseModel):
-    """Response after saving a session"""
-    session_id: str
-    status: str
-    new_level: Optional[int] = None  # set if the learning level was adjusted
-
-
-# ==============================================
-# Difficulty mapping
-# ==============================================
-
-DIFFICULTY_MAPPING = {
-    1: GameDifficulty(
-        lane_speed=3.0,
-        obstacle_frequency=0.3,
-        power_up_chance=0.4,
-        question_complexity=1,
-        answer_time=15,
-        hints_enabled=True,
-        speech_speed=0.8
-    ),
-    2: GameDifficulty(
-        lane_speed=4.0,
-        obstacle_frequency=0.4,
-        power_up_chance=0.35,
-        question_complexity=2,
-        answer_time=12,
-        hints_enabled=True,
-        speech_speed=0.9
-    ),
-    3: GameDifficulty(
-        lane_speed=5.0,
-        obstacle_frequency=0.5,
-        power_up_chance=0.3,
-        question_complexity=3,
-        answer_time=10,
-        hints_enabled=True,
-        speech_speed=1.0
-    ),
-    4: GameDifficulty(
-        lane_speed=6.0,
-        obstacle_frequency=0.6,
-        power_up_chance=0.25,
-        question_complexity=4,
-        answer_time=8,
-        hints_enabled=False,
-        speech_speed=1.1
-    ),
-    5: GameDifficulty(
-        lane_speed=7.0,
-        obstacle_frequency=0.7,
-        power_up_chance=0.2,
-        question_complexity=5,
-        answer_time=6,
-        hints_enabled=False,
-        speech_speed=1.2
-    ),
-}
-
-
-# ==============================================
-# Sample quiz questions (load from DB later)
-# ==============================================
-
-SAMPLE_QUESTIONS = [
-    # ==============================================
-    # QUICK QUESTIONS (asked while driving, visually triggered)
-    # ==============================================
-
-    # English vocabulary - in-game objects (QUICK MODE)
-    QuizQuestion(
-        id="vq-bridge", question_text="What is this?",
-        options=["Bridge", "House"], correct_index=0,
-        difficulty=1, subject="english", grade_level=3,
-        quiz_mode="quick", visual_trigger="bridge", time_limit_seconds=3.0
-    ),
-    QuizQuestion(
-        id="vq-tree", question_text="What is this?",
-        options=["Tree", "Flower"], correct_index=0,
-        difficulty=1, subject="english", grade_level=3,
-        quiz_mode="quick", visual_trigger="tree", time_limit_seconds=3.0
-    ),
-    QuizQuestion(
-        id="vq-house", question_text="What is this?",
-        options=["House", "Car"], correct_index=0,
-        difficulty=1, subject="english", grade_level=3,
-        quiz_mode="quick", visual_trigger="house", time_limit_seconds=3.0
-    ),
-    QuizQuestion(
-        id="vq-car", question_text="What is this?",
-        options=["Car", "Bus"], correct_index=0,
-        difficulty=1, subject="english", grade_level=3,
-        quiz_mode="quick", visual_trigger="car", time_limit_seconds=2.5
-    ),
-    QuizQuestion(
-        id="vq-mountain", question_text="What is this?",
-        options=["Hill", "Mountain", "Valley"], correct_index=1,
-        difficulty=2, subject="english", grade_level=4,
-        quiz_mode="quick", visual_trigger="mountain", time_limit_seconds=3.5
-    ),
-    QuizQuestion(
-        id="vq-river", question_text="What is this?",
-        options=["Lake", "River", "Sea"], correct_index=1,
-        difficulty=2, subject="english", grade_level=4,
-        quiz_mode="quick", visual_trigger="river", time_limit_seconds=3.5
-    ),
-
-    # Quick arithmetic (QUICK MODE)
-    QuizQuestion(
-        id="mq-1", question_text="3 + 4 = ?",
-        options=["6", "7"], correct_index=1,
-        difficulty=1, subject="math", grade_level=2,
-        quiz_mode="quick", time_limit_seconds=4.0
-    ),
-    QuizQuestion(
-        id="mq-2", question_text="5 x 2 = ?",
-        options=["10", "12"], correct_index=0,
-        difficulty=1, subject="math", grade_level=2,
-        quiz_mode="quick", time_limit_seconds=4.0
-    ),
-    QuizQuestion(
-        id="mq-3", question_text="8 - 3 = ?",
-        options=["4", "5"], correct_index=1,
-        difficulty=1, subject="math", grade_level=2,
-        quiz_mode="quick", time_limit_seconds=3.5
-    ),
-    QuizQuestion(
-        id="mq-4", question_text="6 x 7 = ?",
-        options=["42", "48"], correct_index=0,
-        difficulty=2, subject="math", grade_level=3,
-        quiz_mode="quick", time_limit_seconds=5.0
-    ),
-    QuizQuestion(
-        id="mq-5", question_text="9 x 8 = ?",
-        options=["72", "64"], correct_index=0,
-        difficulty=3, subject="math", grade_level=4,
-        quiz_mode="quick", time_limit_seconds=5.0
-    ),
-
-    # ==============================================
-    # PAUSE QUESTIONS (game pauses, more time to answer)
-    # ==============================================
-
-    # Math level 1-2 (grades 2-3) - PAUSE MODE
-    QuizQuestion(
-        id="mp1-1", question_text="Anna hat 5 Aepfel. Sie bekommt 3 dazu. Wie viele hat sie jetzt?",
-        options=["6", "7", "8", "9"], correct_index=2,
-        difficulty=1, subject="math", grade_level=2,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp2-1", question_text="Ein Bus hat 24 Sitze. 18 sind besetzt. Wie viele sind frei?",
-        options=["4", "5", "6", "7"], correct_index=2,
-        difficulty=2, subject="math", grade_level=3,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp2-2", question_text="Was ist 45 + 27?",
-        options=["72", "62", "82", "70"], correct_index=0,
-        difficulty=2, subject="math", grade_level=3,
-        quiz_mode="pause"
-    ),
-
-    # Math level 3-4 (grades 4-5) - PAUSE MODE
-    QuizQuestion(
-        id="mp3-1", question_text="Was ist 7 x 8?",
-        options=["54", "56", "58", "48"], correct_index=1,
-        difficulty=3, subject="math", grade_level=4,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp3-2", question_text="Ein Rechteck ist 8m lang und 5m breit. Wie gross ist die Flaeche?",
-        options=["35 m2", "40 m2", "45 m2", "26 m2"], correct_index=1,
-        difficulty=3, subject="math", grade_level=4,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp4-1", question_text="Was ist 15% von 80?",
-        options=["10", "12", "8", "15"], correct_index=1,
-        difficulty=4, subject="math", grade_level=5,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp4-2", question_text="Was ist 3/4 + 1/2?",
-        options=["5/4", "4/6", "1", "5/6"], correct_index=0,
-        difficulty=4, subject="math", grade_level=5,
-        quiz_mode="pause"
-    ),
-
-    # Math level 5 (grade 6) - PAUSE MODE
-    QuizQuestion(
-        id="mp5-1", question_text="Was ist (-5) x (-3)?",
-        options=["-15", "15", "-8", "8"], correct_index=1,
-        difficulty=5, subject="math", grade_level=6,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="mp5-2", question_text="Loesung von 2x + 5 = 11?",
-        options=["2", "3", "4", "6"], correct_index=1,
-        difficulty=5, subject="math", grade_level=6,
-        quiz_mode="pause"
-    ),
-
-    # German - PAUSE MODE (these need reading time)
-    QuizQuestion(
-        id="dp1-1", question_text="Welches Wort ist ein Nomen?",
-        options=["laufen", "schnell", "Hund", "und"], correct_index=2,
-        difficulty=1, subject="german", grade_level=2,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="dp2-1", question_text="Was ist die Mehrzahl von 'Haus'?",
-        options=["Haeuse", "Haeuser", "Hausern", "Haus"], correct_index=1,
-        difficulty=2, subject="german", grade_level=3,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="dp3-1", question_text="Welches Verb steht im Praeteritum?",
-        options=["geht", "ging", "gegangen", "gehen"], correct_index=1,
-        difficulty=3, subject="german", grade_level=4,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="dp3-2", question_text="Finde den Rechtschreibfehler: 'Der Hund leuft schnell.'",
-        options=["Hund", "leuft", "schnell", "Der"], correct_index=1,
-        difficulty=3, subject="german", grade_level=4,
-        quiz_mode="pause"
-    ),
-
-    # English sentences - PAUSE MODE
-    QuizQuestion(
-        id="ep3-1", question_text="How do you say 'Schmetterling'?",
-        options=["bird", "bee", "butterfly", "beetle"], correct_index=2,
-        difficulty=3, subject="english", grade_level=4,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="ep4-1", question_text="Choose the correct form: She ___ to school.",
-        options=["go", "goes", "going", "gone"], correct_index=1,
-        difficulty=4, subject="english", grade_level=5,
-        quiz_mode="pause"
-    ),
-    QuizQuestion(
-        id="ep4-2", question_text="What is the past tense of 'run'?",
-        options=["runned", "ran", "runed", "running"], correct_index=1,
-        difficulty=4, subject="english", grade_level=5,
-        quiz_mode="pause"
-    ),
-]
+# Backward-compat shim -- module moved to game/game_models.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("game.game_models")
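
DIFFICULTY_MAPPING above scales monotonically with the level: lane speed and obstacle frequency rise while answer time, power-up chance, and hints fall. Reading it is plain dict access; a quick check (the import path follows this commit's new game package layout):

from game.game_models import DIFFICULTY_MAPPING

easy, hard = DIFFICULTY_MAPPING[1], DIFFICULTY_MAPPING[5]
assert easy.lane_speed < hard.lane_speed    # 3.0 -> 7.0 m/s
assert easy.answer_time > hard.answer_time  # 15s -> 6s
assert easy.hints_enabled and not hard.hints_enabled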
@@ -1,296 +1,4 @@
-# ==============================================
-# Breakpilot Drive - Game API Core Routes
-# ==============================================
-# Core game endpoints: learning level, difficulty, quiz questions.
-# Session/stats/leaderboard routes are in game_session_routes.py.
-# Extracted from game_api.py for file-size compliance.
-
-from fastapi import APIRouter, HTTPException, Query, Depends, Request
-from typing import List, Optional, Dict, Any
-from datetime import datetime
-import random
-import uuid
-import os
-import logging
-
-from game_models import (
-    LearningLevel,
-    GameDifficulty,
-    QuizQuestion,
-    QuizAnswer,
-    GameSession,
-    SessionResponse,
-    DIFFICULTY_MAPPING,
-    SAMPLE_QUESTIONS,
-)
-
-logger = logging.getLogger(__name__)
-
-# Feature flags
-USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
-REQUIRE_AUTH = os.getenv("GAME_REQUIRE_AUTH", "false").lower() == "true"
-
-router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])
-
-
-# ==============================================
-# Auth Dependency (Optional)
-# ==============================================
-
-async def get_optional_current_user(request: Request) -> Optional[Dict[str, Any]]:
-    """
-    Optional auth dependency for Game API.
-
-    If GAME_REQUIRE_AUTH=true: Requires valid JWT token
-    If GAME_REQUIRE_AUTH=false: Returns None (anonymous access)
-
-    In development mode without auth, returns demo user.
-    """
-    if not REQUIRE_AUTH:
-        return None
-
-    try:
-        from auth import get_current_user
-        return await get_current_user(request)
-    except ImportError:
-        logger.warning("Auth module not available")
-        return None
-    except HTTPException:
-        raise  # Re-raise auth errors
-    except Exception as e:
-        logger.error(f"Auth error: {e}")
-        raise HTTPException(status_code=401, detail="Authentication failed")
-
-
-def get_user_id_from_auth(
-    user: Optional[Dict[str, Any]],
-    requested_user_id: str
-) -> str:
-    """
-    Get the effective user ID, respecting auth when enabled.
-
-    If auth is enabled and user is authenticated:
-    - Returns user's own ID if requested_user_id matches
-    - For parents: allows access to child IDs from token
-    - For teachers: allows access to student IDs (future)
-
-    If auth is disabled: Returns requested_user_id as-is
-    """
-    if not REQUIRE_AUTH or user is None:
-        return requested_user_id
-
-    user_id = user.get("user_id", "")
-
-    # Same user - always allowed
-    if requested_user_id == user_id:
-        return user_id
-
-    # Check for parent accessing child data
-    children_ids = user.get("raw_claims", {}).get("children_ids", [])
-    if requested_user_id in children_ids:
-        return requested_user_id
-
-    # Check for teacher accessing student data (future)
-    realm_roles = user.get("realm_roles", [])
-    if "lehrer" in realm_roles or "teacher" in realm_roles:
-        # Teachers can access any student in their class (implement class check later)
-        return requested_user_id
-
-    # Admin bypass
-    if "admin" in realm_roles:
-        return requested_user_id
-
-    # Not authorized
-    raise HTTPException(
-        status_code=403,
-        detail="Not authorized to access this user's data"
-    )
-
-
-# In-memory session storage (fallback when the DB is unavailable)
-_sessions: dict[str, GameSession] = {}
-_user_levels: dict[str, LearningLevel] = {}
-
-# Database integration
-_game_db = None
-
-async def get_game_database():
-    """Get game database instance with lazy initialization."""
-    global _game_db
-    if not USE_DATABASE:
-        return None
-    if _game_db is None:
-        try:
-            from game.database import get_game_db
-            _game_db = await get_game_db()
-            logger.info("Game database initialized")
-        except Exception as e:
-            logger.warning(f"Game database not available, using in-memory: {e}")
-    return _game_db
-
-
-# ==============================================
-# API endpoints
-# ==============================================
-
-@router.get("/learning-level/{user_id}", response_model=LearningLevel)
-async def get_learning_level(
-    user_id: str,
-    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
-) -> LearningLevel:
-    """
-    Fetches a user's current learning level from Breakpilot.
-
-    - Called at game start to adjust the difficulty
-    - Returns level 1-5 (1=beginner, 5=advanced)
-    - Caches values for fast access
-    - Stores to PostgreSQL when available
-    - With GAME_REQUIRE_AUTH=true: own data or children's data only
-    """
-    # Verify access rights
-    user_id = get_user_id_from_auth(user, user_id)
-
-    # Try database first
-    db = await get_game_database()
-    if db:
-        state = await db.get_learning_state(user_id)
-        if state:
-            return LearningLevel(
-                user_id=user_id,
-                overall_level=state.overall_level,
-                math_level=state.math_level,
-                german_level=state.german_level,
-                english_level=state.english_level,
-                last_updated=state.updated_at or datetime.now()
-            )
-
-        # Create new state in database
-        new_state = await db.create_or_update_learning_state(
-            student_id=user_id,
-            overall_level=3,
-            math_level=3.0,
-            german_level=3.0,
-            english_level=3.0
-        )
-        if new_state:
-            return LearningLevel(
-                user_id=user_id,
-                overall_level=new_state.overall_level,
-                math_level=new_state.math_level,
-                german_level=new_state.german_level,
-                english_level=new_state.english_level,
-                last_updated=new_state.updated_at or datetime.now()
-            )
-
-    # Fallback to in-memory
-    if user_id in _user_levels:
-        return _user_levels[user_id]
-
-    # Default level for new users
-    default_level = LearningLevel(
-        user_id=user_id,
-        overall_level=3,  # medium level as default
-        math_level=3.0,
-        german_level=3.0,
-        english_level=3.0,
-        last_updated=datetime.now()
-    )
-    _user_levels[user_id] = default_level
-    return default_level
-
-
-@router.get("/difficulty/{level}", response_model=GameDifficulty)
-async def get_game_difficulty(level: int) -> GameDifficulty:
-    """
-    Returns game parameters based on the learning level.
-
-    Levels 1-5 are mapped to game speed, obstacle frequency,
-    question difficulty, and so on.
-    """
-    if level < 1 or level > 5:
-        raise HTTPException(status_code=400, detail="Level muss zwischen 1 und 5 sein")
-
-    return DIFFICULTY_MAPPING[level]
-
-
-@router.get("/quiz/questions", response_model=List[QuizQuestion])
-async def get_quiz_questions(
-    difficulty: int = Query(3, ge=1, le=5, description="Schwierigkeitsgrad 1-5"),
-    count: int = Query(10, ge=1, le=50, description="Anzahl der Fragen"),
-    subject: Optional[str] = Query(None, description="Fach: math, german, english, oder None fuer gemischt"),
-    mode: Optional[str] = Query(None, description="Quiz-Modus: quick (waehrend Fahrt), pause (Spiel pausiert), oder None fuer beide")
-) -> List[QuizQuestion]:
-    """
-    Fetches quiz questions for the game.

-    - Filters by difficulty (+/- 1 level)
-    - Optionally filterable by subject
-    - Optionally by mode: "quick" (visual questions while driving) or "pause" (thinking tasks)
-    - Returns a random selection
-    """
-    # Filter questions by difficulty (+/- 1 level tolerance)
-    filtered = [
-        q for q in SAMPLE_QUESTIONS
-        if abs(q.difficulty - difficulty) <= 1
-        and (subject is None or q.subject == subject)
-        and (mode is None or q.quiz_mode == mode)
-    ]
-
-    if not filtered:
-        # Fallback: all questions if none match
-        filtered = [q for q in SAMPLE_QUESTIONS if mode is None or q.quiz_mode == mode]
-
-    # Random selection
-    selected = random.sample(filtered, min(count, len(filtered)))
-    return selected
-
-
-@router.get("/quiz/visual-triggers")
-async def get_visual_triggers() -> List[dict]:
-    """
-    Returns all available visual triggers.
-
-    Unity uses this list to know which in-game objects
-    can trigger quiz questions.
-    """
-    triggers = {}
-    for q in SAMPLE_QUESTIONS:
-        if q.visual_trigger and q.quiz_mode == "quick":
-            if q.visual_trigger not in triggers:
-                triggers[q.visual_trigger] = {
-                    "trigger": q.visual_trigger,
-                    "question_count": 0,
-                    "difficulties": set(),
-                    "subjects": set()
-                }
-            triggers[q.visual_trigger]["question_count"] += 1
-            triggers[q.visual_trigger]["difficulties"].add(q.difficulty)
-            triggers[q.visual_trigger]["subjects"].add(q.subject)
-
-    # Convert sets to lists for JSON
-    return [
-        {
-            "trigger": t["trigger"],
-            "question_count": t["question_count"],
-            "difficulties": list(t["difficulties"]),
-            "subjects": list(t["subjects"])
-        }
-        for t in triggers.values()
-    ]
-
-
-@router.post("/quiz/answer")
-async def submit_quiz_answer(answer: QuizAnswer) -> dict:
-    """
-    Processes a quiz answer (for real-time feedback).
-
-    In the final version: stores it in the session and updates analytics.
-    """
-    return {
-        "question_id": answer.question_id,
-        "was_correct": answer.was_correct,
-        "points": 500 if answer.was_correct else -100,
-        "message": "Richtig! Weiter so!" if answer.was_correct else "Nicht ganz, versuch es nochmal!"
-    }
+# Backward-compat shim -- module moved to game/routes.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("game.routes")
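
get_quiz_questions above tolerates one difficulty level in either direction, so a level-3 request may also serve level-2 and level-4 questions before falling back to the whole pool. The same predicate in isolation, with toy tuples standing in for SAMPLE_QUESTIONS entries:

import random

# Toy stand-ins: (difficulty, subject, quiz_mode)
questions = [(1, "math", "quick"), (3, "math", "pause"),
             (4, "english", "pause"), (5, "german", "pause")]

def pick(difficulty, subject=None, mode=None, count=10):
    # Same filter as get_quiz_questions: +/- 1 difficulty, optional subject/mode
    filtered = [q for q in questions
                if abs(q[0] - difficulty) <= 1
                and (subject is None or q[1] == subject)
                and (mode is None or q[2] == mode)]
    return random.sample(filtered, min(count, len(filtered)))

assert all(2 <= q[0] <= 4 for q in pick(3))  # a level-3 request serves 2-4 only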
@@ -1,395 +1,4 @@
|
|||||||
# ==============================================
|
# Backward-compat shim -- module moved to game/session_routes.py
|
||||||
# Breakpilot Drive - Game Session & Stats Routes
|
import importlib as _importlib
|
||||||
# ==============================================
|
import sys as _sys
|
||||||
# Session saving, leaderboard, stats, suggestions,
|
_sys.modules[__name__] = _importlib.import_module("game.session_routes")
|
||||||
# quiz generation, and health check.
|
|
||||||
# Extracted from game_routes.py for file-size compliance.
|
|
||||||
|
|
||||||
from fastapi import APIRouter, HTTPException, Query, Depends, Request
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
from datetime import datetime
|
|
||||||
import uuid
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from game_models import (
|
|
||||||
LearningLevel,
|
|
||||||
QuizQuestion,
|
|
||||||
GameSession,
|
|
||||||
SessionResponse,
|
|
||||||
SAMPLE_QUESTIONS,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Import shared state and helpers from game_routes
|
|
||||||
# (these are the canonical instances)
|
|
||||||
from game_routes import (
|
|
||||||
get_optional_current_user,
|
|
||||||
get_user_id_from_auth,
|
|
||||||
get_game_database,
|
|
||||||
get_quiz_questions,
|
|
||||||
_sessions,
|
|
||||||
_user_levels,
|
|
||||||
REQUIRE_AUTH,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/game", tags=["Breakpilot Drive"])
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/session", response_model=SessionResponse)
|
|
||||||
async def save_game_session(
|
|
||||||
session: GameSession,
|
|
||||||
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
|
||||||
) -> SessionResponse:
|
|
||||||
"""
|
|
||||||
Speichert eine komplette Spielsession.
|
|
||||||
|
|
||||||
- Protokolliert Score, Distanz, Fragen-Performance
|
|
||||||
- Aktualisiert Lernniveau bei genuegend Daten
|
|
||||||
- Wird am Ende jedes Spiels aufgerufen
|
|
||||||
- Speichert in PostgreSQL wenn verfuegbar
|
|
||||||
- Bei GAME_REQUIRE_AUTH=true: User-ID aus Token
|
|
||||||
"""
|
|
||||||
# If auth is enabled, use user_id from token (ignore session.user_id)
|
|
||||||
effective_user_id = session.user_id
|
|
||||||
if REQUIRE_AUTH and user:
|
|
||||||
effective_user_id = user.get("user_id", session.user_id)
|
|
||||||
|
|
||||||
session_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
# Lernniveau-Anpassung basierend auf Performance
|
|
||||||
new_level = None
|
|
||||||
old_level = 3 # Default
|
|
||||||
|
|
||||||
# Try to get current level first
|
|
||||||
db = await get_game_database()
|
|
||||||
if db:
|
|
||||||
state = await db.get_learning_state(effective_user_id)
|
|
||||||
if state:
|
|
||||||
old_level = state.overall_level
|
|
||||||
else:
|
|
||||||
# Create initial state if not exists
|
|
||||||
await db.create_or_update_learning_state(effective_user_id)
|
|
||||||
old_level = 3
|
|
||||||
elif effective_user_id in _user_levels:
|
|
||||||
old_level = _user_levels[effective_user_id].overall_level
|
|
||||||
|
|
||||||
# Calculate level adjustment
|
|
||||||
if session.questions_answered >= 5:
|
|
||||||
accuracy = session.questions_correct / session.questions_answered
|
|
||||||
|
|
||||||
# Anpassung: Wenn >80% korrekt und max nicht erreicht -> Level up
|
|
||||||
if accuracy >= 0.8 and old_level < 5:
|
|
||||||
new_level = old_level + 1
|
|
||||||
# Wenn <40% korrekt und min nicht erreicht -> Level down
|
|
||||||
elif accuracy < 0.4 and old_level > 1:
|
|
||||||
new_level = old_level - 1
|
|
||||||
|
|
||||||
# Save to database
|
|
||||||
if db:
|
|
||||||
# Save session
|
|
||||||
db_session_id = await db.save_game_session(
|
|
||||||
student_id=effective_user_id,
|
|
||||||
game_mode=session.game_mode,
|
|
||||||
duration_seconds=session.duration_seconds,
|
|
||||||
distance_traveled=session.distance_traveled,
|
|
||||||
score=session.score,
|
|
||||||
questions_answered=session.questions_answered,
|
|
||||||
questions_correct=session.questions_correct,
|
|
||||||
difficulty_level=session.difficulty_level,
|
|
||||||
)
|
|
||||||
if db_session_id:
|
|
||||||
session_id = db_session_id
|
|
||||||
|
|
||||||
# Save individual quiz answers if provided
|
|
||||||
if session.quiz_answers:
|
|
||||||
for answer in session.quiz_answers:
|
|
||||||
await db.save_quiz_answer(
|
|
||||||
session_id=session_id,
|
|
||||||
question_id=answer.question_id,
|
|
||||||
subject="general", # Could be enhanced to track actual subject
|
|
||||||
difficulty=session.difficulty_level,
|
|
||||||
is_correct=answer.was_correct,
|
|
||||||
answer_time_ms=answer.answer_time_ms,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Update learning stats
|
|
||||||
duration_minutes = session.duration_seconds // 60
|
|
||||||
await db.update_learning_stats(
|
|
||||||
student_id=effective_user_id,
|
|
||||||
duration_minutes=duration_minutes,
|
|
||||||
questions_answered=session.questions_answered,
|
|
||||||
questions_correct=session.questions_correct,
|
|
||||||
new_level=new_level,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# Fallback to in-memory
|
|
||||||
_sessions[session_id] = session
|
|
||||||
|
|
||||||
if new_level:
|
|
||||||
_user_levels[effective_user_id] = LearningLevel(
|
|
||||||
user_id=effective_user_id,
|
|
||||||
overall_level=new_level,
|
|
||||||
math_level=float(new_level),
|
|
||||||
german_level=float(new_level),
|
|
||||||
english_level=float(new_level),
|
|
||||||
last_updated=datetime.now()
|
|
||||||
)
|
|
||||||
|
|
||||||
return SessionResponse(
|
|
||||||
session_id=session_id,
|
|
||||||
status="saved",
|
|
||||||
new_level=new_level
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/sessions/{user_id}")
|
|
||||||
async def get_user_sessions(
|
|
||||||
user_id: str,
|
|
||||||
limit: int = Query(10, ge=1, le=100),
|
|
||||||
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
|
||||||
) -> List[dict]:
|
|
||||||
"""
|
|
||||||
Holt die letzten Spielsessions eines Benutzers.
|
|
||||||
|
|
||||||
Fuer Statistiken und Fortschrittsanzeige.
|
|
||||||
Bei GAME_REQUIRE_AUTH=true: Nur eigene oder Kind-Daten.
|
|
||||||
"""
|
|
||||||
# Verify access rights
|
|
||||||
user_id = get_user_id_from_auth(user, user_id)
|
|
||||||
|
|
||||||
# Try database first
|
|
||||||
db = await get_game_database()
|
|
||||||
if db:
|
|
||||||
sessions = await db.get_user_sessions(user_id, limit)
|
|
||||||
if sessions:
|
|
||||||
return sessions
|
|
||||||
|
|
||||||
# Fallback to in-memory
|
|
||||||
user_sessions = [
|
|
||||||
{"session_id": sid, **s.model_dump()}
|
|
||||||
for sid, s in _sessions.items()
|
|
||||||
if s.user_id == user_id
|
|
||||||
]
|
|
||||||
return user_sessions[:limit]
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/leaderboard")
|
|
||||||
async def get_leaderboard(
|
|
||||||
timeframe: str = Query("day", description="day, week, month, all"),
|
|
||||||
limit: int = Query(10, ge=1, le=100)
|
|
||||||
) -> List[dict]:
|
|
||||||
"""
|
|
||||||
Gibt Highscore-Liste zurueck.
|
|
||||||
|
|
||||||
- Sortiert nach Punktzahl
|
|
||||||
- Optional nach Zeitraum filterbar
|
|
||||||
"""
|
|
||||||
# Try database first
|
|
||||||
db = await get_game_database()
|
|
||||||
if db:
|
|
||||||
leaderboard = await db.get_leaderboard(timeframe, limit)
|
|
||||||
if leaderboard:
|
|
||||||
return leaderboard
|
|
||||||
|
|
||||||
# Fallback to in-memory
|
|
||||||
# Aggregiere Scores pro User
|
|
||||||
user_scores: dict[str, int] = {}
|
|
||||||
for session in _sessions.values():
|
|
||||||
if session.user_id not in user_scores:
|
|
||||||
user_scores[session.user_id] = 0
|
|
||||||
user_scores[session.user_id] += session.score
|
|
||||||
|
|
||||||
# Sortieren und limitieren
|
|
||||||
leaderboard = [
|
|
||||||
{"rank": i + 1, "user_id": uid, "total_score": score}
|
|
||||||
for i, (uid, score) in enumerate(
|
|
||||||
sorted(user_scores.items(), key=lambda x: x[1], reverse=True)[:limit]
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
return leaderboard
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/stats/{user_id}")
|
|
||||||
async def get_user_stats(
|
|
||||||
user_id: str,
|
|
||||||
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
|
||||||
) -> dict:
|
|
||||||
"""
|
|
||||||
Gibt detaillierte Statistiken fuer einen Benutzer zurueck.
|
|
||||||
|
|
||||||
- Gesamtstatistiken
|
|
||||||
- Fach-spezifische Statistiken
|
|
||||||
- Lernniveau-Verlauf
|
|
||||||
- Bei GAME_REQUIRE_AUTH=true: Nur eigene oder Kind-Daten
|
|
||||||
"""
|
|
||||||
# Verify access rights
|
|
||||||
user_id = get_user_id_from_auth(user, user_id)
|
|
||||||
|
|
||||||
db = await get_game_database()
|
|
||||||
if db:
|
|
||||||
state = await db.get_learning_state(user_id)
|
|
||||||
subject_stats = await db.get_subject_stats(user_id)
|
|
||||||
|
|
||||||
if state:
|
|
||||||
return {
|
|
||||||
"user_id": user_id,
|
|
||||||
"overall_level": state.overall_level,
|
|
||||||
"math_level": state.math_level,
|
|
||||||
"german_level": state.german_level,
|
|
||||||
"english_level": state.english_level,
|
|
||||||
"total_play_time_minutes": state.total_play_time_minutes,
|
|
||||||
"total_sessions": state.total_sessions,
|
|
||||||
"questions_answered": state.questions_answered,
|
|
||||||
"questions_correct": state.questions_correct,
|
|
||||||
"accuracy": state.accuracy,
|
|
||||||
"subjects": subject_stats,
|
|
||||||
}
    # Fallback - return defaults
    return {
        "user_id": user_id,
        "overall_level": 3,
        "math_level": 3.0,
        "german_level": 3.0,
        "english_level": 3.0,
        "total_play_time_minutes": 0,
        "total_sessions": 0,
        "questions_answered": 0,
        "questions_correct": 0,
        "accuracy": 0.0,
        "subjects": {},
    }


@router.get("/suggestions/{user_id}")
async def get_learning_suggestions(
    user_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> dict:
    """
    Gibt adaptive Lernvorschlaege fuer einen Benutzer zurueck.

    Basierend auf aktueller Performance und Lernhistorie.
    Bei GAME_REQUIRE_AUTH=true: Nur eigene oder Kind-Daten.
    """
    # Verify access rights
    user_id = get_user_id_from_auth(user, user_id)

    db = await get_game_database()
    if not db:
        return {"suggestions": [], "message": "Database not available"}

    state = await db.get_learning_state(user_id)
    if not state:
        return {"suggestions": [], "message": "No learning state found"}

    try:
        from game.learning_rules import (
            LearningContext,
            get_rule_engine,
        )

        # Create context from state
        context = LearningContext.from_learning_state(state)

        # Get suggestions from rule engine
        engine = get_rule_engine()
        suggestions = engine.evaluate(context)

        return {
            "user_id": user_id,
            "overall_level": state.overall_level,
            "suggestions": [
                {
                    "title": s.title,
                    "description": s.description,
                    "action": s.action.value,
                    "priority": s.priority.name.lower(),
                    "metadata": s.metadata or {},
                }
                for s in suggestions[:3]  # Top 3 suggestions
            ]
        }
    except ImportError:
        return {"suggestions": [], "message": "Learning rules not available"}
    except Exception as e:
        logger.warning(f"Failed to get suggestions: {e}")
        return {"suggestions": [], "message": str(e)}


@router.get("/quiz/generate")
async def generate_quiz_questions(
    difficulty: int = Query(3, ge=1, le=5, description="Schwierigkeitsgrad 1-5"),
    count: int = Query(5, ge=1, le=20, description="Anzahl der Fragen"),
    subject: Optional[str] = Query(None, description="Fach: math, german, english"),
    mode: str = Query("quick", description="Quiz-Modus: quick oder pause"),
    visual_trigger: Optional[str] = Query(None, description="Visueller Trigger: bridge, tree, house, etc.")
) -> List[dict]:
    """
    Generiert Quiz-Fragen dynamisch via LLM.

    Fallback auf statische Fragen wenn LLM nicht verfuegbar.
    """
    try:
        from game.quiz_generator import get_quiz_generator

        generator = await get_quiz_generator()
        questions = await generator.get_questions(
            difficulty=difficulty,
            subject=subject or "general",
            mode=mode,
            count=count,
            visual_trigger=visual_trigger
        )

        if questions:
            return [
                {
                    "id": f"gen-{i}",
                    "question_text": q.question_text,
                    "options": q.options,
                    "correct_index": q.correct_index,
                    "difficulty": q.difficulty,
                    "subject": q.subject,
                    "grade_level": q.grade_level,
                    "quiz_mode": q.quiz_mode,
                    "visual_trigger": q.visual_trigger,
                    "time_limit_seconds": q.time_limit_seconds,
                }
                for i, q in enumerate(questions)
            ]
    except ImportError:
        logger.info("Quiz generator not available, using static questions")
    except Exception as e:
        logger.warning(f"Quiz generation failed: {e}")

    # Fallback to static questions
    return await get_quiz_questions(difficulty, count, subject, mode)


@router.get("/health")
async def health_check() -> dict:
    """Health-Check fuer das Spiel-Backend."""
    db = await get_game_database()
    db_status = "connected" if db and db._connected else "disconnected"

    # Check LLM availability
    llm_status = "disabled"
    try:
        from game.quiz_generator import get_quiz_generator
        generator = await get_quiz_generator()
        llm_status = "connected" if generator._llm_available else "disconnected"
    except Exception:  # any probe failure leaves llm_status at "disabled"
        pass
    return {
        "status": "healthy",
        "service": "breakpilot-drive",
        "database": db_status,
        "llm_generator": llm_status,
        "auth_required": REQUIRE_AUTH,
        "questions_available": len(SAMPLE_QUESTIONS),
        "active_sessions": len(_sessions)
    }
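# A minimal smoke-test sketch (illustrative, not part of this commit) for the
# /health endpoint above, using FastAPI's TestClient. Mounting the router on a
# bare FastAPI() app without an extra prefix is an assumption; adapt to the
# service's real app factory.
from fastapi import FastAPI
from fastapi.testclient import TestClient

def smoke_test_health(game_router) -> None:
    app = FastAPI()
    app.include_router(game_router)
    client = TestClient(app)
    resp = client.get("/health")
    assert resp.status_code == 200
    assert resp.json()["status"] == "healthy"  # the endpoint always reports "healthy"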
@@ -1,178 +1,4 @@
-from __future__ import annotations
-
-from pydantic import BaseModel, Field
-from typing import List, Dict, Optional
-from pathlib import Path
-from datetime import datetime
-import uuid
-import json
-import threading
-
-# Basisverzeichnis für Arbeitsblätter & Lerneinheiten
-BASE_DIR = Path.home() / "Arbeitsblaetter"
-LEARNING_UNITS_DIR = BASE_DIR / "Lerneinheiten"
-LEARNING_UNITS_FILE = LEARNING_UNITS_DIR / "learning_units.json"
-
-# Thread-Lock, damit Dateizugriffe sicher bleiben
-_lock = threading.Lock()
-
-
-class LearningUnitBase(BaseModel):
-    title: str = Field(..., description="Titel der Lerneinheit, z.B. 'Das Auge – Klasse 7'")
-    description: Optional[str] = Field(None, description="Freitext-Beschreibung")
-    topic: Optional[str] = Field(None, description="Kurz-Thema, z.B. 'Auge'")
-    grade_level: Optional[str] = Field(None, description="Klassenstufe, z.B. '7'")
-    language: Optional[str] = Field("de", description="Hauptsprache der Lerneinheit (z.B. 'de', 'tr')")
-    worksheet_files: List[str] = Field(
-        default_factory=list,
-        description="Liste der zugeordneten Arbeitsblatt-Dateien (Basenames oder Pfade)"
-    )
-    status: str = Field(
-        "raw",
-        description="Pipeline-Status: raw, cleaned, qa_generated, mc_generated, cloze_generated"
-    )
-
-
-class LearningUnitCreate(LearningUnitBase):
-    """Payload zum Erstellen einer neuen Lerneinheit."""
-    pass
-
-
-class LearningUnitUpdate(BaseModel):
-    """Teil-Update für eine Lerneinheit."""
-    title: Optional[str] = None
-    description: Optional[str] = None
-    topic: Optional[str] = None
-    grade_level: Optional[str] = None
-    language: Optional[str] = None
-    worksheet_files: Optional[List[str]] = None
-    status: Optional[str] = None
-
-
-class LearningUnit(LearningUnitBase):
-    id: str
-    created_at: datetime
-    updated_at: datetime
-
-    @classmethod
-    def from_dict(cls, data: Dict) -> "LearningUnit":
-        data = data.copy()
-        if isinstance(data.get("created_at"), str):
-            data["created_at"] = datetime.fromisoformat(data["created_at"])
-        if isinstance(data.get("updated_at"), str):
-            data["updated_at"] = datetime.fromisoformat(data["updated_at"])
-        return cls(**data)
-
-    def to_dict(self) -> Dict:
-        d = self.dict()
-        d["created_at"] = self.created_at.isoformat()
-        d["updated_at"] = self.updated_at.isoformat()
-        return d
-
-
-def _ensure_storage():
-    """Sorgt dafür, dass der Ordner und die JSON-Datei existieren."""
-    LEARNING_UNITS_DIR.mkdir(parents=True, exist_ok=True)
-    if not LEARNING_UNITS_FILE.exists():
-        with LEARNING_UNITS_FILE.open("w", encoding="utf-8") as f:
-            json.dump({}, f)
-
-
-def _load_all_units() -> Dict[str, Dict]:
-    _ensure_storage()
-    with LEARNING_UNITS_FILE.open("r", encoding="utf-8") as f:
-        try:
-            data = json.load(f)
-            if not isinstance(data, dict):
-                return {}
-            return data
-        except json.JSONDecodeError:
-            return {}
-
-
-def _save_all_units(raw: Dict[str, Dict]) -> None:
-    _ensure_storage()
-    with LEARNING_UNITS_FILE.open("w", encoding="utf-8") as f:
-        json.dump(raw, f, ensure_ascii=False, indent=2)
-
-
-def list_learning_units() -> List[LearningUnit]:
-    with _lock:
-        raw = _load_all_units()
-        return [LearningUnit.from_dict(v) for v in raw.values()]
-
-
-def get_learning_unit(unit_id: str) -> Optional[LearningUnit]:
-    with _lock:
-        raw = _load_all_units()
-        data = raw.get(unit_id)
-        if not data:
-            return None
-        return LearningUnit.from_dict(data)
-
-
-def create_learning_unit(payload: LearningUnitCreate) -> LearningUnit:
-    now = datetime.utcnow()
-    lu = LearningUnit(
-        id=str(uuid.uuid4()),
-        created_at=now,
-        updated_at=now,
-        **payload.dict()
-    )
-    with _lock:
-        raw = _load_all_units()
-        raw[lu.id] = lu.to_dict()
-        _save_all_units(raw)
-    return lu
-
-
-def update_learning_unit(unit_id: str, payload: LearningUnitUpdate) -> Optional[LearningUnit]:
-    with _lock:
-        raw = _load_all_units()
-        existing = raw.get(unit_id)
-        if not existing:
-            return None
-
-        lu = LearningUnit.from_dict(existing)
-        update_data = payload.dict(exclude_unset=True)
-
-        for field, value in update_data.items():
-            setattr(lu, field, value)
-
-        lu.updated_at = datetime.utcnow()
-        raw[lu.id] = lu.to_dict()
-        _save_all_units(raw)
-        return lu
-
-
-def delete_learning_unit(unit_id: str) -> bool:
-    with _lock:
-        raw = _load_all_units()
-        if unit_id not in raw:
-            return False
-        del raw[unit_id]
-        _save_all_units(raw)
-        return True
-
-
-def attach_worksheets(unit_id: str, worksheet_files: List[str]) -> Optional[LearningUnit]:
-    """
-    Hängt eine Liste von Arbeitsblatt-Dateien an eine bestehende Lerneinheit an.
-    Doppelte Einträge werden vermieden.
-    """
-    with _lock:
-        raw = _load_all_units()
-        existing = raw.get(unit_id)
-        if not existing:
-            return None
-
-        lu = LearningUnit.from_dict(existing)
-        current_set = set(lu.worksheet_files)
-        for f in worksheet_files:
-            current_set.add(f)
-        lu.worksheet_files = sorted(current_set)
-        lu.updated_at = datetime.utcnow()
-
-        raw[lu.id] = lu.to_dict()
-        _save_all_units(raw)
-        return lu
+# Backward-compat shim -- module moved to units/learning.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("units.learning")
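# What the four-line shim above buys, as a sketch (illustrative, not part of
# this commit): swapping sys.modules[__name__] during import makes the legacy
# module name and the new location resolve to the same module object, so old
# import sites keep working unchanged. Assumes the units package is importable.
import learning_units   # executing this module runs the shim
import units.learning   # the relocated implementation

assert learning_units is units.learning
assert learning_units.list_learning_units is units.learning.list_learning_units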
@@ -1,376 +1,4 @@
-from typing import List, Dict, Any, Optional
-from datetime import datetime
-from pathlib import Path
-import json
-import os
-import logging
-
-from fastapi import APIRouter, HTTPException
-from pydantic import BaseModel
-
-from learning_units import (
-    LearningUnit,
-    LearningUnitCreate,
-    LearningUnitUpdate,
-    list_learning_units,
-    get_learning_unit,
-    create_learning_unit,
-    update_learning_unit,
-    delete_learning_unit,
-)
-
-logger = logging.getLogger(__name__)
-
-router = APIRouter(
-    prefix="/learning-units",
-    tags=["learning-units"],
-)
-
-
-# ---------- Payload-Modelle für das Frontend ----------
-
-class LearningUnitCreatePayload(BaseModel):
-    """
-    Payload so, wie er aus dem Frontend kommt:
-    {
-        "student": "...",
-        "subject": "...",
-        "title": "...",
-        "grade": "7a"
-    }
-    """
-    student: Optional[str] = None
-    subject: Optional[str] = None
-    title: Optional[str] = None
-    grade: Optional[str] = None
-
-
-class AttachWorksheetsPayload(BaseModel):
-    worksheet_files: List[str]
-
-
-class RemoveWorksheetPayload(BaseModel):
-    worksheet_file: str
-
-
-class GenerateFromAnalysisPayload(BaseModel):
-    analysis_data: Dict[str, Any]
-    num_questions: int = 8
-
-
-# ---------- Hilfsfunktion: Backend-Modell -> Frontend-Objekt ----------
-
-def unit_to_frontend_dict(lu: LearningUnit) -> Dict[str, Any]:
-    """
-    Wandelt eine LearningUnit in das Format um, das das Frontend erwartet.
-    Wichtig sind:
-    - id
-    - label (sichtbarer Name)
-    - meta (Untertitelzeile)
-    - worksheet_files (Liste von Dateinamen)
-    """
-    label = lu.title or "Lerneinheit"
-
-    # Meta-Text: z.B. "Thema: Auge · Klasse: 7a · angelegt am 10.12.2025"
-    meta_parts: List[str] = []
-    if lu.topic:
-        meta_parts.append(f"Thema: {lu.topic}")
-    if lu.grade_level:
-        meta_parts.append(f"Klasse: {lu.grade_level}")
-    created_str = lu.created_at.strftime("%d.%m.%Y")
-    meta_parts.append(f"angelegt am {created_str}")
-
-    meta = " · ".join(meta_parts)
-
-    return {
-        "id": lu.id,
-        "label": label,
-        "meta": meta,
-        "title": lu.title,
-        "topic": lu.topic,
-        "grade_level": lu.grade_level,
-        "language": lu.language,
-        "status": lu.status,
-        "worksheet_files": lu.worksheet_files,
-        "created_at": lu.created_at.isoformat(),
-        "updated_at": lu.updated_at.isoformat(),
-    }
-
-
-# ---------- Endpunkte ----------
-
-@router.get("/", response_model=List[Dict[str, Any]])
-def api_list_learning_units():
-    """Alle Lerneinheiten für das Frontend auflisten."""
-    units = list_learning_units()
-    return [unit_to_frontend_dict(u) for u in units]
-
-
-@router.post("/", response_model=Dict[str, Any])
-def api_create_learning_unit(payload: LearningUnitCreatePayload):
-    """
-    Neue Lerneinheit anlegen.
-    Mapped das Frontend-Payload (student/subject/title/grade)
-    auf das generische LearningUnit-Modell.
-    """
-    # Mindestens eines der Felder muss gesetzt sein
-    if not (payload.student or payload.subject or payload.title):
-        raise HTTPException(
-            status_code=400,
-            detail="Bitte mindestens Schüler/in, Fach oder Thema angeben.",
-        )
-
-    # Titel/Topic bestimmen
-    # sichtbarer Titel: bevorzugt Thema (title), sonst Kombination
-    if payload.title:
-        title = payload.title
-    else:
-        parts = []
-        if payload.subject:
-            parts.append(payload.subject)
-        if payload.student:
-            parts.append(payload.student)
-        title = " – ".join(parts) if parts else "Lerneinheit"
-
-    topic = payload.title or payload.subject or None
-    grade_level = payload.grade or None
-
-    lu_create = LearningUnitCreate(
-        title=title,
-        description=None,
-        topic=topic,
-        grade_level=grade_level,
-        language="de",
-        worksheet_files=[],
-        status="raw",
-    )
-
-    lu = create_learning_unit(lu_create)
-    return unit_to_frontend_dict(lu)
-
-
-@router.post("/{unit_id}/attach-worksheets", response_model=Dict[str, Any])
-def api_attach_worksheets(unit_id: str, payload: AttachWorksheetsPayload):
-    """
-    Fügt der Lerneinheit eine oder mehrere Arbeitsblätter hinzu.
-    """
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    files_to_add = [f for f in payload.worksheet_files if f not in lu.worksheet_files]
-    if files_to_add:
-        new_list = lu.worksheet_files + files_to_add
-        update = LearningUnitUpdate(worksheet_files=new_list)
-        lu = update_learning_unit(unit_id, update)
-        if not lu:
-            raise HTTPException(status_code=500, detail="Lerneinheit konnte nicht aktualisiert werden.")
-
-    return unit_to_frontend_dict(lu)
-
-
-@router.post("/{unit_id}/remove-worksheet", response_model=Dict[str, Any])
-def api_remove_worksheet(unit_id: str, payload: RemoveWorksheetPayload):
-    """
-    Entfernt genau ein Arbeitsblatt aus der Lerneinheit.
-    """
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    if payload.worksheet_file not in lu.worksheet_files:
-        # Nichts zu tun, aber kein Fehler – einfach unverändert zurückgeben
-        return unit_to_frontend_dict(lu)
-
-    new_list = [f for f in lu.worksheet_files if f != payload.worksheet_file]
-    update = LearningUnitUpdate(worksheet_files=new_list)
-    lu = update_learning_unit(unit_id, update)
-    if not lu:
-        raise HTTPException(status_code=500, detail="Lerneinheit konnte nicht aktualisiert werden.")
-
-    return unit_to_frontend_dict(lu)
-
-
-@router.delete("/{unit_id}")
-def api_delete_learning_unit(unit_id: str):
-    """
-    Lerneinheit komplett löschen (aktuell vom Frontend noch nicht verwendet).
-    """
-    ok = delete_learning_unit(unit_id)
-    if not ok:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-    return {"status": "deleted", "id": unit_id}
-
-
-# ---------- Generator-Endpunkte ----------
-
-LERNEINHEITEN_DIR = os.path.expanduser("~/Arbeitsblaetter/Lerneinheiten")
-
-
-def _save_analysis_and_get_path(unit_id: str, analysis_data: Dict[str, Any]) -> Path:
-    """Save analysis_data to disk and return the path."""
-    os.makedirs(LERNEINHEITEN_DIR, exist_ok=True)
-    path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_analyse.json"
-    with open(path, "w", encoding="utf-8") as f:
-        json.dump(analysis_data, f, ensure_ascii=False, indent=2)
-    return path
-
-
-@router.post("/{unit_id}/generate-qa")
-def api_generate_qa(unit_id: str, payload: GenerateFromAnalysisPayload):
-    """Generate Q&A items with Leitner fields from analysis data."""
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)
-
-    try:
-        from ai_processing.qa_generator import generate_qa_from_analysis
-        qa_path = generate_qa_from_analysis(analysis_path, num_questions=payload.num_questions)
-        with open(qa_path, "r", encoding="utf-8") as f:
-            qa_data = json.load(f)
-
-        # Update unit status
-        update_learning_unit(unit_id, LearningUnitUpdate(status="qa_generated"))
-        logger.info(f"Generated QA for unit {unit_id}: {len(qa_data.get('qa_items', []))} items")
-        return qa_data
-    except Exception as e:
-        logger.error(f"QA generation failed for {unit_id}: {e}")
-        raise HTTPException(status_code=500, detail=f"QA-Generierung fehlgeschlagen: {e}")
-
-
-@router.post("/{unit_id}/generate-mc")
-def api_generate_mc(unit_id: str, payload: GenerateFromAnalysisPayload):
-    """Generate multiple choice questions from analysis data."""
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)
-
-    try:
-        from ai_processing.mc_generator import generate_mc_from_analysis
-        mc_path = generate_mc_from_analysis(analysis_path, num_questions=payload.num_questions)
-        with open(mc_path, "r", encoding="utf-8") as f:
-            mc_data = json.load(f)
-
-        update_learning_unit(unit_id, LearningUnitUpdate(status="mc_generated"))
-        logger.info(f"Generated MC for unit {unit_id}: {len(mc_data.get('questions', []))} questions")
-        return mc_data
-    except Exception as e:
-        logger.error(f"MC generation failed for {unit_id}: {e}")
-        raise HTTPException(status_code=500, detail=f"MC-Generierung fehlgeschlagen: {e}")
-
-
-@router.post("/{unit_id}/generate-cloze")
-def api_generate_cloze(unit_id: str, payload: GenerateFromAnalysisPayload):
-    """Generate cloze (fill-in-the-blank) items from analysis data."""
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)
-
-    try:
-        from ai_processing.cloze_generator import generate_cloze_from_analysis
-        cloze_path = generate_cloze_from_analysis(analysis_path)
-        with open(cloze_path, "r", encoding="utf-8") as f:
-            cloze_data = json.load(f)
-
-        update_learning_unit(unit_id, LearningUnitUpdate(status="cloze_generated"))
-        logger.info(f"Generated Cloze for unit {unit_id}: {len(cloze_data.get('cloze_items', []))} items")
-        return cloze_data
-    except Exception as e:
-        logger.error(f"Cloze generation failed for {unit_id}: {e}")
-        raise HTTPException(status_code=500, detail=f"Cloze-Generierung fehlgeschlagen: {e}")
-
-
-@router.get("/{unit_id}/qa")
-def api_get_qa(unit_id: str):
-    """Get generated QA items for a unit."""
-    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
-    if not qa_path.exists():
-        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
-    with open(qa_path, "r", encoding="utf-8") as f:
-        return json.load(f)
-
-
-@router.get("/{unit_id}/mc")
-def api_get_mc(unit_id: str):
-    """Get generated MC questions for a unit."""
-    mc_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_mc.json"
-    if not mc_path.exists():
-        raise HTTPException(status_code=404, detail="Keine MC-Daten gefunden.")
-    with open(mc_path, "r", encoding="utf-8") as f:
-        return json.load(f)
-
-
-@router.get("/{unit_id}/cloze")
-def api_get_cloze(unit_id: str):
-    """Get generated cloze items for a unit."""
-    cloze_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_cloze.json"
-    if not cloze_path.exists():
-        raise HTTPException(status_code=404, detail="Keine Cloze-Daten gefunden.")
-    with open(cloze_path, "r", encoding="utf-8") as f:
-        return json.load(f)
-
-
-@router.post("/{unit_id}/leitner/update")
-def api_update_leitner(unit_id: str, item_id: str, correct: bool):
-    """Update Leitner progress for a QA item."""
-    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
-    if not qa_path.exists():
-        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
-    try:
-        from ai_processing.qa_generator import update_leitner_progress
-        result = update_leitner_progress(qa_path, item_id, correct)
-        return result
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-@router.get("/{unit_id}/leitner/next")
-def api_get_next_review(unit_id: str, limit: int = 5):
-    """Get next Leitner review items."""
-    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
-    if not qa_path.exists():
-        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
-    try:
-        from ai_processing.qa_generator import get_next_review_items
-        items = get_next_review_items(qa_path, limit=limit)
-        return {"items": items, "count": len(items)}
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-class StoryGeneratePayload(BaseModel):
-    vocabulary: List[Dict[str, Any]]
-    language: str = "en"
-    grade_level: str = "5-8"
-
-
-@router.post("/{unit_id}/generate-story")
-def api_generate_story(unit_id: str, payload: StoryGeneratePayload):
-    """Generate a short story using vocabulary words."""
-    lu = get_learning_unit(unit_id)
-    if not lu:
-        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
-
-    try:
-        from story_generator import generate_story
-        result = generate_story(
-            vocabulary=payload.vocabulary,
-            language=payload.language,
-            grade_level=payload.grade_level,
-        )
-        return result
-    except Exception as e:
-        logger.error(f"Story generation failed for {unit_id}: {e}")
-        raise HTTPException(status_code=500, detail=f"Story-Generierung fehlgeschlagen: {e}")
+# Backward-compat shim -- module moved to units/learning_api.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("units.learning_api")
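# A sketch (illustrative, not part of this commit) of the classic Leitner rule
# that the /leitner/update endpoint above delegates to ai_processing.qa_generator:
# a correct answer promotes an item one box, a wrong one resets it to box 1.
# The field name "box" and the five-box ceiling are assumptions about that
# module's data layout.
def leitner_update(item: dict, correct: bool, max_box: int = 5) -> dict:
    box = item.get("box", 1)
    item["box"] = min(box + 1, max_box) if correct else 1
    return item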
@@ -0,0 +1 @@
# letters — Elternbriefe and Zeugnisse (certificates).
@@ -0,0 +1,346 @@
"""
Letters API - Elternbrief-Verwaltung fuer BreakPilot.

Bietet Endpoints fuer:
- Speichern und Laden von Elternbriefen
- PDF-Export von Briefen
- Versenden per Email
- GFK-Integration fuer Textverbesserung

Split into:
- letters_models.py: Enums, Pydantic models, helper functions
- letters_api.py (this file): API endpoints and in-memory store
"""

import logging
import os
import uuid
from datetime import datetime
from typing import Optional, Dict, Any

from fastapi import APIRouter, HTTPException, Response, Query
import httpx

# PDF service requires WeasyPrint with system libraries - make optional for CI
try:
    from services.pdf_service import generate_letter_pdf, SchoolInfo
    _pdf_available = True
except (ImportError, OSError):
    generate_letter_pdf = None  # type: ignore
    SchoolInfo = None  # type: ignore
    _pdf_available = False

from .models import (
    LetterType,
    LetterTone,
    LetterStatus,
    LetterCreateRequest,
    LetterUpdateRequest,
    LetterResponse,
    LetterListResponse,
    ExportPDFRequest,
    ImproveRequest,
    ImproveResponse,
    SendEmailRequest,
    SendEmailResponse,
    get_type_label as _get_type_label,
    get_tone_label as _get_tone_label,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/letters", tags=["letters"])


# =============================================================================
# In-Memory Storage (Prototyp - spaeter durch DB ersetzen)
# =============================================================================

_letters_store: Dict[str, Dict[str, Any]] = {}


def _get_letter(letter_id: str) -> Dict[str, Any]:
    """Holt Brief aus dem Store."""
    if letter_id not in _letters_store:
        raise HTTPException(status_code=404, detail=f"Brief mit ID {letter_id} nicht gefunden")
    return _letters_store[letter_id]


def _save_letter(letter_data: Dict[str, Any]) -> str:
    """Speichert Brief und gibt ID zurueck."""
    letter_id = letter_data.get("id") or str(uuid.uuid4())
    letter_data["id"] = letter_id
    letter_data["updated_at"] = datetime.now()
    if "created_at" not in letter_data:
        letter_data["created_at"] = datetime.now()
    _letters_store[letter_id] = letter_data
    return letter_id


# =============================================================================
# API Endpoints
# =============================================================================

@router.post("/", response_model=LetterResponse)
async def create_letter(request: LetterCreateRequest):
    """Erstellt einen neuen Elternbrief."""
    logger.info(f"Creating new letter for student: {request.student_name}")

    letter_data = {
        "recipient_name": request.recipient_name,
        "recipient_address": request.recipient_address,
        "student_name": request.student_name,
        "student_class": request.student_class,
        "subject": request.subject,
        "content": request.content,
        "letter_type": request.letter_type,
        "tone": request.tone,
        "teacher_name": request.teacher_name,
        "teacher_title": request.teacher_title,
        "school_info": request.school_info.model_dump() if request.school_info else None,
        "legal_references": [ref.model_dump() for ref in request.legal_references] if request.legal_references else None,
        "gfk_principles_applied": request.gfk_principles_applied,
        "gfk_score": None,
        "status": LetterStatus.DRAFT,
        "pdf_path": None,
        "dsms_cid": None,
        "sent_at": None,
    }

    letter_id = _save_letter(letter_data)
    letter_data["id"] = letter_id
    logger.info(f"Letter created with ID: {letter_id}")
    return LetterResponse(**letter_data)


# NOTE: Static routes must come BEFORE dynamic routes like /{letter_id}
@router.get("/types")
async def get_letter_types():
    """Gibt alle verfuegbaren Brieftypen zurueck."""
    return {"types": [{"value": t.value, "label": _get_type_label(t)} for t in LetterType]}


@router.get("/tones")
async def get_letter_tones():
    """Gibt alle verfuegbaren Tonalitaeten zurueck."""
    return {"tones": [{"value": t.value, "label": _get_tone_label(t)} for t in LetterTone]}
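# A sketch (illustrative, not part of this commit) of the pitfall the NOTE
# above guards against: routes match in registration order, so a dynamic path
# registered first captures the static ones. All names here are hypothetical.
from fastapi import APIRouter as _DemoAPIRouter

_demo = _DemoAPIRouter()

@_demo.get("/{letter_id}")   # registered first: GET /types would bind letter_id="types"
async def _demo_get(letter_id: str):
    return {"letter_id": letter_id}

@_demo.get("/types")         # unreachable behind the dynamic route above
async def _demo_types():
    return {"types": []}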
@router.get("/{letter_id}", response_model=LetterResponse)
|
||||||
|
async def get_letter(letter_id: str):
|
||||||
|
"""Laedt einen gespeicherten Brief."""
|
||||||
|
logger.info(f"Getting letter: {letter_id}")
|
||||||
|
letter_data = _get_letter(letter_id)
|
||||||
|
return LetterResponse(**letter_data)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/", response_model=LetterListResponse)
|
||||||
|
async def list_letters(
|
||||||
|
student_id: Optional[str] = Query(None),
|
||||||
|
class_name: Optional[str] = Query(None),
|
||||||
|
letter_type: Optional[LetterType] = Query(None),
|
||||||
|
status: Optional[LetterStatus] = Query(None),
|
||||||
|
page: int = Query(1, ge=1),
|
||||||
|
page_size: int = Query(20, ge=1, le=100)
|
||||||
|
):
|
||||||
|
"""Listet alle gespeicherten Briefe mit optionalen Filtern."""
|
||||||
|
logger.info("Listing letters with filters")
|
||||||
|
|
||||||
|
filtered_letters = list(_letters_store.values())
|
||||||
|
if class_name:
|
||||||
|
filtered_letters = [l for l in filtered_letters if l.get("student_class") == class_name]
|
||||||
|
if letter_type:
|
||||||
|
filtered_letters = [l for l in filtered_letters if l.get("letter_type") == letter_type]
|
||||||
|
if status:
|
||||||
|
filtered_letters = [l for l in filtered_letters if l.get("status") == status]
|
||||||
|
|
||||||
|
filtered_letters.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
|
||||||
|
total = len(filtered_letters)
|
||||||
|
start = (page - 1) * page_size
|
||||||
|
paginated_letters = filtered_letters[start:start + page_size]
|
||||||
|
|
||||||
|
return LetterListResponse(
|
||||||
|
letters=[LetterResponse(**l) for l in paginated_letters],
|
||||||
|
total=total, page=page, page_size=page_size
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{letter_id}", response_model=LetterResponse)
|
||||||
|
async def update_letter(letter_id: str, request: LetterUpdateRequest):
|
||||||
|
"""Aktualisiert einen bestehenden Brief."""
|
||||||
|
logger.info(f"Updating letter: {letter_id}")
|
||||||
|
letter_data = _get_letter(letter_id)
|
||||||
|
|
||||||
|
update_data = request.model_dump(exclude_unset=True)
|
||||||
|
for key, value in update_data.items():
|
||||||
|
if value is not None:
|
||||||
|
if key == "school_info" and value:
|
||||||
|
letter_data[key] = value if isinstance(value, dict) else value.model_dump()
|
||||||
|
elif key == "legal_references" and value:
|
||||||
|
letter_data[key] = [ref if isinstance(ref, dict) else ref.model_dump() for ref in value]
|
||||||
|
else:
|
||||||
|
letter_data[key] = value
|
||||||
|
|
||||||
|
_save_letter(letter_data)
|
||||||
|
return LetterResponse(**letter_data)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{letter_id}")
|
||||||
|
async def delete_letter(letter_id: str):
|
||||||
|
"""Loescht einen Brief."""
|
||||||
|
logger.info(f"Deleting letter: {letter_id}")
|
||||||
|
if letter_id not in _letters_store:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Brief mit ID {letter_id} nicht gefunden")
|
||||||
|
del _letters_store[letter_id]
|
||||||
|
return {"message": f"Brief {letter_id} wurde geloescht"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/export-pdf")
|
||||||
|
async def export_letter_pdf(request: ExportPDFRequest):
|
||||||
|
"""Exportiert einen Brief als PDF."""
|
||||||
|
logger.info("Exporting letter as PDF")
|
||||||
|
|
||||||
|
if request.letter_id:
|
||||||
|
letter_data = _get_letter(request.letter_id)
|
||||||
|
elif request.letter_data:
|
||||||
|
letter_data = request.letter_data.model_dump()
|
||||||
|
else:
|
||||||
|
raise HTTPException(status_code=400, detail="Entweder letter_id oder letter_data muss angegeben werden")
|
||||||
|
|
||||||
|
if "date" not in letter_data:
|
||||||
|
letter_data["date"] = datetime.now().strftime("%d.%m.%Y")
|
||||||
|
|
||||||
|
try:
|
||||||
|
pdf_bytes = generate_letter_pdf(letter_data)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error generating PDF: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Fehler bei PDF-Generierung: {str(e)}")
|
||||||
|
|
||||||
|
student_name = letter_data.get("student_name", "Brief").replace(" ", "_")
|
||||||
|
date_str = datetime.now().strftime("%Y%m%d")
|
||||||
|
filename = f"Elternbrief_{student_name}_{date_str}.pdf"
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
content=pdf_bytes, media_type="application/pdf",
|
||||||
|
headers={"Content-Disposition": f"attachment; filename={filename}", "Content-Length": str(len(pdf_bytes))}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{letter_id}/export-pdf")
|
||||||
|
async def export_saved_letter_pdf(letter_id: str):
|
||||||
|
"""Exportiert einen gespeicherten Brief als PDF (Kurzform)."""
|
||||||
|
return await export_letter_pdf(ExportPDFRequest(letter_id=letter_id))
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/improve", response_model=ImproveResponse)
|
||||||
|
async def improve_letter_content(request: ImproveRequest):
|
||||||
|
"""Verbessert den Briefinhalt nach GFK-Prinzipien."""
|
||||||
|
logger.info("Improving letter content with GFK principles")
|
||||||
|
|
||||||
|
comm_service_url = os.getenv("COMMUNICATION_SERVICE_URL", "http://localhost:8000/v1/communication")
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with httpx.AsyncClient() as client:
|
||||||
|
validate_response = await client.post(
|
||||||
|
f"{comm_service_url}/validate",
|
||||||
|
json={"text": request.content}, timeout=30.0
|
||||||
|
)
|
||||||
|
|
||||||
|
if validate_response.status_code != 200:
|
||||||
|
logger.warning(f"Validation service returned {validate_response.status_code}")
|
||||||
|
return ImproveResponse(
|
||||||
|
improved_content=request.content,
|
||||||
|
changes=["Verbesserungsservice nicht verfuegbar"],
|
||||||
|
gfk_score=0.5, gfk_principles_applied=[]
|
||||||
|
)
|
||||||
|
|
||||||
|
validation_data = validate_response.json()
|
||||||
|
|
||||||
|
if validation_data.get("is_valid", False) and validation_data.get("gfk_score", 0) > 0.8:
|
||||||
|
return ImproveResponse(
|
||||||
|
improved_content=request.content,
|
||||||
|
changes=["Text entspricht bereits GFK-Standards"],
|
||||||
|
gfk_score=validation_data.get("gfk_score", 0.8),
|
||||||
|
gfk_principles_applied=validation_data.get("positive_elements", [])
|
||||||
|
)
|
||||||
|
|
||||||
|
return ImproveResponse(
|
||||||
|
improved_content=request.content,
|
||||||
|
changes=validation_data.get("suggestions", []),
|
||||||
|
gfk_score=validation_data.get("gfk_score", 0.5),
|
||||||
|
gfk_principles_applied=validation_data.get("positive_elements", [])
|
||||||
|
)
|
||||||
|
|
||||||
|
except httpx.TimeoutException:
|
||||||
|
logger.error("Timeout while calling communication service")
|
||||||
|
return ImproveResponse(
|
||||||
|
improved_content=request.content,
|
||||||
|
changes=["Zeitueberschreitung beim Verbesserungsservice"],
|
||||||
|
gfk_score=0.5, gfk_principles_applied=[]
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error improving content: {e}")
|
||||||
|
return ImproveResponse(
|
||||||
|
improved_content=request.content,
|
||||||
|
changes=[f"Fehler: {str(e)}"],
|
||||||
|
gfk_score=0.5, gfk_principles_applied=[]
|
||||||
|
)
|
||||||
|
|
||||||
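# The /validate reply shape that improve_letter_content consumes above,
# reconstructed from the keys the handler reads; a sketch, not the
# communication service's documented schema.
_example_validation_reply = {
    "is_valid": True,
    "gfk_score": 0.86,  # > 0.8 short-circuits as "entspricht bereits GFK-Standards"
    "suggestions": ["Ich-Botschaft statt Vorwurf verwenden"],
    "positive_elements": ["Beduerfnis benannt", "konkrete Beobachtung"],
}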
@router.post("/{letter_id}/send", response_model=SendEmailResponse)
async def send_letter_email(letter_id: str, request: SendEmailRequest):
    """Versendet einen Brief per Email."""
    logger.info(f"Sending letter {letter_id} to {request.recipient_email}")
    letter_data = _get_letter(letter_id)

    if request.include_pdf and not _pdf_available:
        # generate_letter_pdf is None when WeasyPrint is missing, so fail clearly
        raise HTTPException(status_code=503, detail="PDF-Service nicht verfuegbar (WeasyPrint fehlt)")

    try:
        pdf_attachment = None
        if request.include_pdf:
            letter_data["date"] = datetime.now().strftime("%d.%m.%Y")
            pdf_bytes = generate_letter_pdf(letter_data)
            pdf_attachment = {
                "filename": f"Elternbrief_{letter_data.get('student_name', 'Brief').replace(' ', '_')}.pdf",
                "content": pdf_bytes.hex(),
                "content_type": "application/pdf"
            }

        # Prototype: the actual email dispatch is not wired up yet; the send is
        # only logged and the letter marked as SENT.
        async with httpx.AsyncClient() as client:
            logger.info(f"Would send email: {letter_data.get('subject')} to {request.recipient_email}")
            letter_data["status"] = LetterStatus.SENT
            letter_data["sent_at"] = datetime.now()
            _save_letter(letter_data)

        return SendEmailResponse(
            success=True,
            message=f"Brief wurde an {request.recipient_email} gesendet",
            sent_at=datetime.now()
        )

    except Exception as e:
        logger.error(f"Error sending email: {e}")
        return SendEmailResponse(success=False, message=f"Fehler beim Versenden: {str(e)}", sent_at=None)


@router.get("/student/{student_id}", response_model=LetterListResponse)
async def get_letters_for_student(
    student_id: str,
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100)
):
    """Laedt alle Briefe fuer einen bestimmten Schueler."""
    logger.info(f"Getting letters for student: {student_id}")

    filtered_letters = [
        l for l in _letters_store.values()
        if student_id.lower() in l.get("student_name", "").lower()
    ]

    filtered_letters.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
    total = len(filtered_letters)
    start = (page - 1) * page_size
    paginated_letters = filtered_letters[start:start + page_size]

    return LetterListResponse(
        letters=[LetterResponse(**l) for l in paginated_letters],
        total=total, page=page, page_size=page_size
    )
@@ -0,0 +1,340 @@
"""
Certificates API - Zeugnisverwaltung fuer BreakPilot.

Split into:
- certificates_models.py: Enums, Pydantic models, helper functions
- certificates_api.py (this file): API endpoints and in-memory store
"""

import logging
import uuid
from datetime import datetime
from typing import Optional, Dict, List, Any

from fastapi import APIRouter, HTTPException, Response, Query

# PDF service requires WeasyPrint with system libraries - make optional for CI
try:
    from services.pdf_service import generate_certificate_pdf, SchoolInfo
    _pdf_available = True
except (ImportError, OSError):
    generate_certificate_pdf = None  # type: ignore
    SchoolInfo = None  # type: ignore
    _pdf_available = False

from .certificates_models import (
    CertificateType,
    CertificateStatus,
    BehaviorGrade,
    CertificateCreateRequest,
    CertificateUpdateRequest,
    CertificateResponse,
    CertificateListResponse,
    GradeStatistics,
    get_type_label as _get_type_label,
    calculate_average as _calculate_average,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/certificates", tags=["certificates"])


# =============================================================================
# In-Memory Storage (Prototyp - spaeter durch DB ersetzen)
# =============================================================================

_certificates_store: Dict[str, Dict[str, Any]] = {}


def _get_certificate(cert_id: str) -> Dict[str, Any]:
    """Holt Zeugnis aus dem Store."""
    if cert_id not in _certificates_store:
        raise HTTPException(status_code=404, detail=f"Zeugnis mit ID {cert_id} nicht gefunden")
    return _certificates_store[cert_id]


def _save_certificate(cert_data: Dict[str, Any]) -> str:
    """Speichert Zeugnis und gibt ID zurueck."""
    cert_id = cert_data.get("id") or str(uuid.uuid4())
    cert_data["id"] = cert_id
    cert_data["updated_at"] = datetime.now()
    if "created_at" not in cert_data:
        cert_data["created_at"] = datetime.now()
    _certificates_store[cert_id] = cert_data
    return cert_id


# =============================================================================
# API Endpoints
# =============================================================================

@router.post("/", response_model=CertificateResponse)
async def create_certificate(request: CertificateCreateRequest):
    """Erstellt ein neues Zeugnis."""
    logger.info(f"Creating new certificate for student: {request.student_name}")

    subjects_list = [s.model_dump() for s in request.subjects]

    cert_data = {
        "student_id": request.student_id,
        "student_name": request.student_name,
        "student_birthdate": request.student_birthdate,
        "student_class": request.student_class,
        "school_year": request.school_year,
        "certificate_type": request.certificate_type,
        "subjects": subjects_list,
        "attendance": request.attendance.model_dump(),
        "remarks": request.remarks,
        "class_teacher": request.class_teacher,
        "principal": request.principal,
        "school_info": request.school_info.model_dump() if request.school_info else None,
        "issue_date": request.issue_date or datetime.now().strftime("%d.%m.%Y"),
        "social_behavior": request.social_behavior,
        "work_behavior": request.work_behavior,
        "status": CertificateStatus.DRAFT,
        "average_grade": _calculate_average(subjects_list),
        "pdf_path": None,
        "dsms_cid": None,
    }

    cert_id = _save_certificate(cert_data)
    cert_data["id"] = cert_id
    logger.info(f"Certificate created with ID: {cert_id}")
    return CertificateResponse(**cert_data)


# IMPORTANT: Static routes must be defined BEFORE dynamic /{cert_id} route
@router.get("/types")
async def get_certificate_types():
    """Gibt alle verfuegbaren Zeugnistypen zurueck."""
    return {"types": [{"value": t.value, "label": _get_type_label(t)} for t in CertificateType]}


@router.get("/behavior-grades")
async def get_behavior_grades():
    """Gibt alle verfuegbaren Verhaltensnoten zurueck."""
    labels = {
        BehaviorGrade.A: "A - Sehr gut", BehaviorGrade.B: "B - Gut",
        BehaviorGrade.C: "C - Befriedigend", BehaviorGrade.D: "D - Verbesserungswuerdig"
    }
    return {"grades": [{"value": g.value, "label": labels[g]} for g in BehaviorGrade]}


@router.get("/{cert_id}", response_model=CertificateResponse)
async def get_certificate(cert_id: str):
    """Laedt ein gespeichertes Zeugnis."""
    logger.info(f"Getting certificate: {cert_id}")
    return CertificateResponse(**_get_certificate(cert_id))


@router.get("/", response_model=CertificateListResponse)
async def list_certificates(
    student_id: Optional[str] = Query(None),
    class_name: Optional[str] = Query(None),
    school_year: Optional[str] = Query(None),
    certificate_type: Optional[CertificateType] = Query(None),
    status: Optional[CertificateStatus] = Query(None),
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100)
):
    """Listet alle gespeicherten Zeugnisse mit optionalen Filtern."""
    logger.info("Listing certificates with filters")

    filtered_certs = list(_certificates_store.values())
    if student_id:
        filtered_certs = [c for c in filtered_certs if c.get("student_id") == student_id]
    if class_name:
        filtered_certs = [c for c in filtered_certs if c.get("student_class") == class_name]
    if school_year:
        filtered_certs = [c for c in filtered_certs if c.get("school_year") == school_year]
    if certificate_type:
        filtered_certs = [c for c in filtered_certs if c.get("certificate_type") == certificate_type]
    if status:
        filtered_certs = [c for c in filtered_certs if c.get("status") == status]

    filtered_certs.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
    total = len(filtered_certs)
    start = (page - 1) * page_size
    paginated_certs = filtered_certs[start:start + page_size]

    return CertificateListResponse(
        certificates=[CertificateResponse(**c) for c in paginated_certs],
        total=total, page=page, page_size=page_size
    )


@router.put("/{cert_id}", response_model=CertificateResponse)
async def update_certificate(cert_id: str, request: CertificateUpdateRequest):
    """Aktualisiert ein bestehendes Zeugnis."""
    logger.info(f"Updating certificate: {cert_id}")
    cert_data = _get_certificate(cert_id)

    if cert_data.get("status") in [CertificateStatus.ISSUED, CertificateStatus.ARCHIVED]:
        raise HTTPException(status_code=400, detail="Zeugnis wurde bereits ausgestellt und kann nicht mehr bearbeitet werden")

    update_data = request.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        if value is not None:
            if key == "subjects":
                cert_data[key] = [s if isinstance(s, dict) else s.model_dump() for s in value]
                cert_data["average_grade"] = _calculate_average(cert_data["subjects"])
            elif key == "attendance":
                cert_data[key] = value if isinstance(value, dict) else value.model_dump()
            else:
                cert_data[key] = value

    _save_certificate(cert_data)
    return CertificateResponse(**cert_data)


@router.delete("/{cert_id}")
async def delete_certificate(cert_id: str):
    """Loescht ein Zeugnis. Nur Entwuerfe koennen geloescht werden."""
    logger.info(f"Deleting certificate: {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.DRAFT:
        raise HTTPException(status_code=400, detail="Nur Zeugnis-Entwuerfe koennen geloescht werden")
    del _certificates_store[cert_id]
    return {"message": f"Zeugnis {cert_id} wurde geloescht"}


@router.post("/{cert_id}/export-pdf")
async def export_certificate_pdf(cert_id: str):
    """Exportiert ein Zeugnis als PDF."""
    logger.info(f"Exporting certificate {cert_id} as PDF")
    cert_data = _get_certificate(cert_id)

    if not _pdf_available:
        # generate_certificate_pdf is None when WeasyPrint is missing, so fail clearly
        raise HTTPException(status_code=503, detail="PDF-Service nicht verfuegbar (WeasyPrint fehlt)")

    try:
        pdf_bytes = generate_certificate_pdf(cert_data)
    except Exception as e:
        logger.error(f"Error generating PDF: {e}")
        raise HTTPException(status_code=500, detail=f"Fehler bei PDF-Generierung: {str(e)}")

    student_name = cert_data.get("student_name", "Zeugnis").replace(" ", "_")
    school_year = cert_data.get("school_year", "").replace("/", "-")
    cert_type = cert_data.get("certificate_type", "zeugnis")
    filename = f"Zeugnis_{student_name}_{cert_type}_{school_year}.pdf"

    from urllib.parse import quote
    filename_ascii = filename.encode('ascii', 'replace').decode('ascii')
    filename_encoded = quote(filename, safe='')

    return Response(
        content=pdf_bytes, media_type="application/pdf",
        headers={
            "Content-Disposition": f"attachment; filename=\"{filename_ascii}\"; filename*=UTF-8''{filename_encoded}",
            "Content-Length": str(len(pdf_bytes))
        }
    )
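# Worked example (hand-computed, illustrative only): for student "Jörg Müller",
# type "jahres", school year "2024/2025", the export above emits the header pair
#   filename="Zeugnis_J?rg_M?ller_jahres_2024-2025.pdf"                 (ASCII fallback via "replace")
#   filename*=UTF-8''Zeugnis_J%C3%B6rg_M%C3%BCller_jahres_2024-2025.pdf
# RFC 6266/5987-aware clients prefer the filename* form and keep the umlauts.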
@router.post("/{cert_id}/submit-review")
async def submit_for_review(cert_id: str):
    """Reicht Zeugnis zur Pruefung ein."""
    logger.info(f"Submitting certificate {cert_id} for review")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.DRAFT:
        raise HTTPException(status_code=400, detail="Nur Entwuerfe koennen zur Pruefung eingereicht werden")
    if not cert_data.get("subjects"):
        raise HTTPException(status_code=400, detail="Keine Fachnoten eingetragen")
    cert_data["status"] = CertificateStatus.REVIEW
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde zur Pruefung eingereicht", "status": CertificateStatus.REVIEW}


@router.post("/{cert_id}/approve")
async def approve_certificate(cert_id: str):
    """Genehmigt ein Zeugnis."""
    logger.info(f"Approving certificate {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.REVIEW:
        raise HTTPException(status_code=400, detail="Nur Zeugnisse in Pruefung koennen genehmigt werden")
    cert_data["status"] = CertificateStatus.APPROVED
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde genehmigt", "status": CertificateStatus.APPROVED}


@router.post("/{cert_id}/issue")
async def issue_certificate(cert_id: str):
    """Stellt ein Zeugnis offiziell aus."""
    logger.info(f"Issuing certificate {cert_id}")
    cert_data = _get_certificate(cert_id)
    if cert_data.get("status") != CertificateStatus.APPROVED:
        raise HTTPException(status_code=400, detail="Nur genehmigte Zeugnisse koennen ausgestellt werden")
    cert_data["status"] = CertificateStatus.ISSUED
    cert_data["issue_date"] = datetime.now().strftime("%d.%m.%Y")
    _save_certificate(cert_data)
    return {"message": "Zeugnis wurde ausgestellt", "status": CertificateStatus.ISSUED, "issue_date": cert_data["issue_date"]}
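# The issuing workflow the three endpoints above enforce, as a sketch
# (illustrative, not part of this commit; the dict name is hypothetical):
# DRAFT -> REVIEW -> APPROVED -> ISSUED, one inbound state per handler.
_ALLOWED_TRANSITIONS = {
    CertificateStatus.DRAFT: CertificateStatus.REVIEW,      # submit-review
    CertificateStatus.REVIEW: CertificateStatus.APPROVED,   # approve
    CertificateStatus.APPROVED: CertificateStatus.ISSUED,   # issue
}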
@router.get("/student/{student_id}", response_model=CertificateListResponse)
async def get_certificates_for_student(
    student_id: str, page: int = Query(1, ge=1), page_size: int = Query(20, ge=1, le=100)
):
    """Laedt alle Zeugnisse fuer einen bestimmten Schueler."""
    logger.info(f"Getting certificates for student: {student_id}")
    filtered_certs = [c for c in _certificates_store.values() if c.get("student_id") == student_id]
    filtered_certs.sort(key=lambda x: (x.get("school_year", ""), x.get("certificate_type", "")), reverse=True)
    total = len(filtered_certs)
    start = (page - 1) * page_size
    paginated_certs = filtered_certs[start:start + page_size]
    return CertificateListResponse(
        certificates=[CertificateResponse(**c) for c in paginated_certs],
        total=total, page=page, page_size=page_size
    )


@router.get("/class/{class_name}/statistics", response_model=GradeStatistics)
async def get_class_statistics(
    class_name: str,
    school_year: str = Query(..., description="Schuljahr"),
    certificate_type: CertificateType = Query(CertificateType.HALBJAHR)
):
    """Berechnet Notenstatistiken fuer eine Klasse."""
    logger.info(f"Calculating statistics for class {class_name}")

    class_certs = [
        c for c in _certificates_store.values()
        if c.get("student_class") == class_name
        and c.get("school_year") == school_year
        and c.get("certificate_type") == certificate_type
    ]

    if not class_certs:
        raise HTTPException(status_code=404, detail=f"Keine Zeugnisse fuer Klasse {class_name} im Schuljahr {school_year} gefunden")

    all_grades: List[float] = []
    subject_grades: Dict[str, List[float]] = {}
    grade_counts = {"1": 0, "2": 0, "3": 0, "4": 0, "5": 0, "6": 0}

    for cert in class_certs:
        avg = cert.get("average_grade")
        if avg:
            all_grades.append(avg)
            rounded = str(round(avg))
            if rounded in grade_counts:
                grade_counts[rounded] += 1

        for subject in cert.get("subjects", []):
            name = subject.get("name")
            grade_str = subject.get("grade")
            try:
                grade = float(grade_str)
                if name not in subject_grades:
                    subject_grades[name] = []
                subject_grades[name].append(grade)
            except (ValueError, TypeError):
                pass  # text grades (e.g. A-D) are excluded from numeric averages

    subject_averages = {
        name: round(sum(grades) / len(grades), 2)
        for name, grades in subject_grades.items() if grades
    }

    return GradeStatistics(
        class_name=class_name, school_year=school_year,
        certificate_type=certificate_type, student_count=len(class_certs),
        average_grade=round(sum(all_grades) / len(all_grades), 2) if all_grades else 0.0,
        grade_distribution=grade_counts, subject_averages=subject_averages
    )
@@ -0,0 +1,184 @@
"""
Certificates Models - Pydantic models and enums for Zeugnisverwaltung.
"""
from datetime import datetime
from typing import Optional, List, Dict
from enum import Enum

from pydantic import BaseModel, Field


# =============================================================================
# Enums
# =============================================================================

class CertificateType(str, Enum):
    """Typen von Zeugnissen."""
    HALBJAHR = "halbjahr"
    JAHRES = "jahres"
    ABSCHLUSS = "abschluss"
    ABGANG = "abgang"
    UEBERGANG = "uebergang"


class CertificateStatus(str, Enum):
    """Status eines Zeugnisses."""
    DRAFT = "draft"
    REVIEW = "review"
    APPROVED = "approved"
    ISSUED = "issued"
    ARCHIVED = "archived"


class GradeType(str, Enum):
    """Notentyp."""
    NUMERIC = "numeric"
    POINTS = "points"
    TEXT = "text"


class BehaviorGrade(str, Enum):
    """Verhaltens-/Arbeitsnoten."""
    A = "A"
    B = "B"
    C = "C"
    D = "D"


# =============================================================================
# Pydantic Models
# =============================================================================

class SchoolInfoModel(BaseModel):
    """Schulinformationen fuer Zeugnis."""
    name: str
    address: str
    phone: str
    email: str
    website: Optional[str] = None
    principal: Optional[str] = None
    logo_path: Optional[str] = None


class SubjectGrade(BaseModel):
    """Note fuer ein Fach."""
    name: str = Field(..., description="Fachname")
    grade: str = Field(..., description="Note (1-6 oder A-D)")
    points: Optional[int] = Field(None, description="Punkte (Oberstufe, 0-15)")
    note: Optional[str] = Field(None, description="Bemerkung zum Fach")


class AttendanceInfo(BaseModel):
    """Anwesenheitsinformationen."""
    days_absent: int = Field(0, description="Fehlende Tage gesamt")
    days_excused: int = Field(0, description="Entschuldigte Tage")
    days_unexcused: int = Field(0, description="Unentschuldigte Tage")
    hours_absent: Optional[int] = Field(None, description="Fehlstunden gesamt")


class CertificateCreateRequest(BaseModel):
    """Request zum Erstellen eines neuen Zeugnisses."""
    student_id: str = Field(..., description="ID des Schuelers")
    student_name: str = Field(..., description="Name des Schuelers")
    student_birthdate: str = Field(..., description="Geburtsdatum")
    student_class: str = Field(..., description="Klasse")
    school_year: str = Field(..., description="Schuljahr (z.B. '2024/2025')")
    certificate_type: CertificateType = Field(..., description="Art des Zeugnisses")
    subjects: List[SubjectGrade] = Field(..., description="Fachnoten")
    attendance: AttendanceInfo = Field(default_factory=AttendanceInfo)
    remarks: Optional[str] = Field(None, description="Bemerkungen")
    class_teacher: str = Field(..., description="Klassenlehrer/in")
    principal: str = Field(..., description="Schulleiter/in")
    school_info: Optional[SchoolInfoModel] = Field(None)
    issue_date: Optional[str] = Field(None, description="Ausstellungsdatum")
    social_behavior: Optional[BehaviorGrade] = Field(None)
    work_behavior: Optional[BehaviorGrade] = Field(None)


class CertificateUpdateRequest(BaseModel):
    """Request zum Aktualisieren eines Zeugnisses."""
    subjects: Optional[List[SubjectGrade]] = None
    attendance: Optional[AttendanceInfo] = None
    remarks: Optional[str] = None
    class_teacher: Optional[str] = None
    principal: Optional[str] = None
    social_behavior: Optional[BehaviorGrade] = None
    work_behavior: Optional[BehaviorGrade] = None
    status: Optional[CertificateStatus] = None


class CertificateResponse(BaseModel):
    """Response mit Zeugnisdaten."""
    id: str
    student_id: str
    student_name: str
    student_birthdate: str
    student_class: str
    school_year: str
    certificate_type: CertificateType
    subjects: List[SubjectGrade]
    attendance: AttendanceInfo
    remarks: Optional[str]
    class_teacher: str
    principal: str
    school_info: Optional[SchoolInfoModel]
    issue_date: Optional[str]
    social_behavior: Optional[BehaviorGrade]
    work_behavior: Optional[BehaviorGrade]
    status: CertificateStatus
    average_grade: Optional[float]
    pdf_path: Optional[str]
    dsms_cid: Optional[str]
    created_at: datetime
    updated_at: datetime


class CertificateListResponse(BaseModel):
    """Response mit Liste von Zeugnissen."""
    certificates: List[CertificateResponse]
    total: int
    page: int
    page_size: int


class GradeStatistics(BaseModel):
    """Notenstatistiken fuer eine Klasse."""
    class_name: str
    school_year: str
    certificate_type: CertificateType
    student_count: int
    average_grade: float
    grade_distribution: Dict[str, int]
    subject_averages: Dict[str, float]


# =============================================================================
# Helper Functions
# =============================================================================

def get_type_label(cert_type: CertificateType) -> str:
    """Gibt menschenlesbare Labels fuer Zeugnistypen zurueck."""
    labels = {
        CertificateType.HALBJAHR: "Halbjahreszeugnis",
        CertificateType.JAHRES: "Jahreszeugnis",
        CertificateType.ABSCHLUSS: "Abschlusszeugnis",
        CertificateType.ABGANG: "Abgangszeugnis",
        CertificateType.UEBERGANG: "Uebergangszeugnis",
    }
    return labels.get(cert_type, cert_type.value)


def calculate_average(subjects: List[Dict]) -> Optional[float]:
    """Berechnet Notendurchschnitt."""
    numeric_grades = []
    for subject in subjects:
        grade = subject.get("grade", "")
        try:
            numeric = float(grade)
            if 1 <= numeric <= 6:
                numeric_grades.append(numeric)
        except (ValueError, TypeError):
            pass
    if numeric_grades:
        return round(sum(numeric_grades) / len(numeric_grades), 2)
    return None
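The two helpers at the bottom are pure functions, so they are easy to sanity-check in isolation. A minimal sketch with illustrative data (sample values, not from this commit):

# calculate_average ignores non-numeric and out-of-range grades.
subjects = [
    {"name": "Deutsch", "grade": "2"},
    {"name": "Mathematik", "grade": "3"},
    {"name": "Sport", "grade": "teilgenommen"},  # skipped: not a number
]
assert calculate_average(subjects) == 2.5
assert get_type_label(CertificateType.HALBJAHR) == "Halbjahreszeugnis"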
@@ -0,0 +1,195 @@
"""
Letters Models - Pydantic models and enums for Elternbrief-Verwaltung.
"""
from datetime import datetime
from typing import Optional, List
from enum import Enum

from pydantic import BaseModel, Field


# =============================================================================
# Enums
# =============================================================================

class LetterType(str, Enum):
    """Typen von Elternbriefen."""
    GENERAL = "general"
    HALBJAHR = "halbjahr"
    FEHLZEITEN = "fehlzeiten"
    ELTERNABEND = "elternabend"
    LOB = "lob"
    CUSTOM = "custom"


class LetterTone(str, Enum):
    """Tonalitaet der Briefe."""
    FORMAL = "formal"
    PROFESSIONAL = "professional"
    WARM = "warm"
    CONCERNED = "concerned"
    APPRECIATIVE = "appreciative"


class LetterStatus(str, Enum):
    """Status eines Briefes."""
    DRAFT = "draft"
    SENT = "sent"
    ARCHIVED = "archived"


# =============================================================================
# Pydantic Models
# =============================================================================

class SchoolInfoModel(BaseModel):
    """Schulinformationen fuer Briefkopf."""
    name: str
    address: str
    phone: str
    email: str
    website: Optional[str] = None
    principal: Optional[str] = None
    logo_path: Optional[str] = None


class LegalReferenceModel(BaseModel):
    """Rechtliche Referenz."""
    law: str
    paragraph: str
    title: str
    summary: Optional[str] = None
    relevance: Optional[str] = None


class LetterCreateRequest(BaseModel):
    """Request zum Erstellen eines neuen Briefes."""
    recipient_name: str = Field(..., description="Name des Empfaengers")
    recipient_address: str = Field(..., description="Adresse des Empfaengers")
    student_name: str = Field(..., description="Name des Schuelers")
    student_class: str = Field(..., description="Klasse des Schuelers")
    subject: str = Field(..., description="Betreff des Briefes")
    content: str = Field(..., description="Inhalt des Briefes")
    letter_type: LetterType = Field(LetterType.GENERAL, description="Art des Briefes")
    tone: LetterTone = Field(LetterTone.PROFESSIONAL, description="Tonalitaet des Briefes")
    teacher_name: str = Field(..., description="Name des Lehrers")
    teacher_title: Optional[str] = Field(None, description="Titel des Lehrers")
    school_info: Optional[SchoolInfoModel] = Field(None, description="Schulinformationen")
    legal_references: Optional[List[LegalReferenceModel]] = Field(None, description="Rechtliche Referenzen")
    gfk_principles_applied: Optional[List[str]] = Field(None, description="Angewandte GFK-Prinzipien")


class LetterUpdateRequest(BaseModel):
    """Request zum Aktualisieren eines Briefes."""
    recipient_name: Optional[str] = None
    recipient_address: Optional[str] = None
    student_name: Optional[str] = None
    student_class: Optional[str] = None
    subject: Optional[str] = None
    content: Optional[str] = None
    letter_type: Optional[LetterType] = None
    tone: Optional[LetterTone] = None
    teacher_name: Optional[str] = None
    teacher_title: Optional[str] = None
    school_info: Optional[SchoolInfoModel] = None
    legal_references: Optional[List[LegalReferenceModel]] = None
    gfk_principles_applied: Optional[List[str]] = None
    status: Optional[LetterStatus] = None


class LetterResponse(BaseModel):
    """Response mit Briefdaten."""
    id: str
    recipient_name: str
    recipient_address: str
    student_name: str
    student_class: str
    subject: str
    content: str
    letter_type: LetterType
    tone: LetterTone
    teacher_name: str
    teacher_title: Optional[str]
    school_info: Optional[SchoolInfoModel]
    legal_references: Optional[List[LegalReferenceModel]]
    gfk_principles_applied: Optional[List[str]]
    gfk_score: Optional[float]
    status: LetterStatus
    pdf_path: Optional[str]
    dsms_cid: Optional[str]
    sent_at: Optional[datetime]
    created_at: datetime
    updated_at: datetime


class LetterListResponse(BaseModel):
    """Response mit Liste von Briefen."""
    letters: List[LetterResponse]
    total: int
    page: int
    page_size: int


class ExportPDFRequest(BaseModel):
    """Request zum PDF-Export."""
    letter_id: Optional[str] = Field(None, description="ID eines gespeicherten Briefes")
    letter_data: Optional[LetterCreateRequest] = Field(None, description="Oder direkte Briefdaten")


class ImproveRequest(BaseModel):
    """Request zur GFK-Verbesserung."""
    content: str = Field(..., description="Text zur Verbesserung")
    communication_type: Optional[str] = Field("general_info", description="Art der Kommunikation")
    tone: Optional[str] = Field("professional", description="Gewuenschte Tonalitaet")


class ImproveResponse(BaseModel):
    """Response mit verbessertem Text."""
    improved_content: str
    changes: List[str]
    gfk_score: float
    gfk_principles_applied: List[str]


class SendEmailRequest(BaseModel):
    """Request zum Email-Versand."""
    letter_id: str
    recipient_email: str
    cc_emails: Optional[List[str]] = None
    include_pdf: bool = True


class SendEmailResponse(BaseModel):
    """Response nach Email-Versand."""
    success: bool
    message: str
    sent_at: Optional[datetime]


# =============================================================================
# Helper Functions
# =============================================================================

def get_type_label(letter_type: LetterType) -> str:
    """Gibt menschenlesbare Labels fuer Brieftypen zurueck."""
    labels = {
        LetterType.GENERAL: "Allgemeine Information",
        LetterType.HALBJAHR: "Halbjahresinformation",
        LetterType.FEHLZEITEN: "Fehlzeiten-Mitteilung",
        LetterType.ELTERNABEND: "Einladung Elternabend",
        LetterType.LOB: "Positives Feedback",
        LetterType.CUSTOM: "Benutzerdefiniert",
    }
    return labels.get(letter_type, letter_type.value)


def get_tone_label(tone: LetterTone) -> str:
    """Gibt menschenlesbare Labels fuer Tonalitaeten zurueck."""
    labels = {
        LetterTone.FORMAL: "Sehr foermlich",
        LetterTone.PROFESSIONAL: "Professionell-freundlich",
        LetterTone.WARM: "Warmherzig",
        LetterTone.CONCERNED: "Besorgt",
        LetterTone.APPRECIATIVE: "Wertschaetzend",
    }
    return labels.get(tone, tone.value)
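Since LetterCreateRequest carries defaults for letter_type and tone, a valid instance only needs the required fields. An illustrative construction (sample values, not from this commit):

# Pydantic validates the enum values and fills the defaults.
req = LetterCreateRequest(
    recipient_name="Familie Mustermann",
    recipient_address="Musterweg 1, 30159 Hannover",
    student_name="Max Mustermann",
    student_class="9b",
    subject="Einladung zum Elternabend",
    content="Liebe Eltern, ...",
    letter_type=LetterType.ELTERNABEND,
    teacher_name="Frau Beispiel",
)
assert req.tone == LetterTone.PROFESSIONAL  # default tone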
@@ -1,346 +1,4 @@
-"""
-Letters API - Elternbrief-Verwaltung fuer BreakPilot.
-
-Bietet Endpoints fuer:
-- Speichern und Laden von Elternbriefen
-- PDF-Export von Briefen
-- Versenden per Email
-- GFK-Integration fuer Textverbesserung
-
-Split into:
-- letters_models.py: Enums, Pydantic models, helper functions
-- letters_api.py (this file): API endpoints and in-memory store
-"""
-
-import logging
-import os
-import uuid
-from datetime import datetime
-from typing import Optional, Dict, Any
-
-from fastapi import APIRouter, HTTPException, Response, Query
-import httpx
-
-# PDF service requires WeasyPrint with system libraries - make optional for CI
-try:
-    from services.pdf_service import generate_letter_pdf, SchoolInfo
-    _pdf_available = True
-except (ImportError, OSError):
-    generate_letter_pdf = None  # type: ignore
-    SchoolInfo = None  # type: ignore
-    _pdf_available = False
-
-from letters_models import (
-    LetterType,
-    LetterTone,
-    LetterStatus,
-    LetterCreateRequest,
-    LetterUpdateRequest,
-    LetterResponse,
-    LetterListResponse,
-    ExportPDFRequest,
-    ImproveRequest,
-    ImproveResponse,
-    SendEmailRequest,
-    SendEmailResponse,
-    get_type_label as _get_type_label,
-    get_tone_label as _get_tone_label,
-)
-
-logger = logging.getLogger(__name__)
-
-router = APIRouter(prefix="/letters", tags=["letters"])
-
-
-# =============================================================================
-# In-Memory Storage (Prototyp - spaeter durch DB ersetzen)
-# =============================================================================
-
-_letters_store: Dict[str, Dict[str, Any]] = {}
-
-
-def _get_letter(letter_id: str) -> Dict[str, Any]:
-    """Holt Brief aus dem Store."""
-    if letter_id not in _letters_store:
-        raise HTTPException(status_code=404, detail=f"Brief mit ID {letter_id} nicht gefunden")
-    return _letters_store[letter_id]
-
-
-def _save_letter(letter_data: Dict[str, Any]) -> str:
-    """Speichert Brief und gibt ID zurueck."""
-    letter_id = letter_data.get("id") or str(uuid.uuid4())
-    letter_data["id"] = letter_id
-    letter_data["updated_at"] = datetime.now()
-    if "created_at" not in letter_data:
-        letter_data["created_at"] = datetime.now()
-    _letters_store[letter_id] = letter_data
-    return letter_id
-
-
-# =============================================================================
-# API Endpoints
-# =============================================================================
-
-@router.post("/", response_model=LetterResponse)
-async def create_letter(request: LetterCreateRequest):
-    """Erstellt einen neuen Elternbrief."""
-    logger.info(f"Creating new letter for student: {request.student_name}")
-
-    letter_data = {
-        "recipient_name": request.recipient_name,
-        "recipient_address": request.recipient_address,
-        "student_name": request.student_name,
-        "student_class": request.student_class,
-        "subject": request.subject,
-        "content": request.content,
-        "letter_type": request.letter_type,
-        "tone": request.tone,
-        "teacher_name": request.teacher_name,
-        "teacher_title": request.teacher_title,
-        "school_info": request.school_info.model_dump() if request.school_info else None,
-        "legal_references": [ref.model_dump() for ref in request.legal_references] if request.legal_references else None,
-        "gfk_principles_applied": request.gfk_principles_applied,
-        "gfk_score": None,
-        "status": LetterStatus.DRAFT,
-        "pdf_path": None,
-        "dsms_cid": None,
-        "sent_at": None,
-    }
-
-    letter_id = _save_letter(letter_data)
-    letter_data["id"] = letter_id
-    logger.info(f"Letter created with ID: {letter_id}")
-    return LetterResponse(**letter_data)
-
-
-# NOTE: Static routes must come BEFORE dynamic routes like /{letter_id}
-@router.get("/types")
-async def get_letter_types():
-    """Gibt alle verfuegbaren Brieftypen zurueck."""
-    return {"types": [{"value": t.value, "label": _get_type_label(t)} for t in LetterType]}
-
-
-@router.get("/tones")
-async def get_letter_tones():
-    """Gibt alle verfuegbaren Tonalitaeten zurueck."""
-    return {"tones": [{"value": t.value, "label": _get_tone_label(t)} for t in LetterTone]}
-
-
-@router.get("/{letter_id}", response_model=LetterResponse)
-async def get_letter(letter_id: str):
-    """Laedt einen gespeicherten Brief."""
-    logger.info(f"Getting letter: {letter_id}")
-    letter_data = _get_letter(letter_id)
-    return LetterResponse(**letter_data)
-
-
-@router.get("/", response_model=LetterListResponse)
-async def list_letters(
-    student_id: Optional[str] = Query(None),
-    class_name: Optional[str] = Query(None),
-    letter_type: Optional[LetterType] = Query(None),
-    status: Optional[LetterStatus] = Query(None),
-    page: int = Query(1, ge=1),
-    page_size: int = Query(20, ge=1, le=100)
-):
-    """Listet alle gespeicherten Briefe mit optionalen Filtern."""
-    logger.info("Listing letters with filters")
-
-    filtered_letters = list(_letters_store.values())
-    if class_name:
-        filtered_letters = [l for l in filtered_letters if l.get("student_class") == class_name]
-    if letter_type:
-        filtered_letters = [l for l in filtered_letters if l.get("letter_type") == letter_type]
-    if status:
-        filtered_letters = [l for l in filtered_letters if l.get("status") == status]
-
-    filtered_letters.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
-    total = len(filtered_letters)
-    start = (page - 1) * page_size
-    paginated_letters = filtered_letters[start:start + page_size]
-
-    return LetterListResponse(
-        letters=[LetterResponse(**l) for l in paginated_letters],
-        total=total, page=page, page_size=page_size
-    )
-
-
-@router.put("/{letter_id}", response_model=LetterResponse)
-async def update_letter(letter_id: str, request: LetterUpdateRequest):
-    """Aktualisiert einen bestehenden Brief."""
-    logger.info(f"Updating letter: {letter_id}")
-    letter_data = _get_letter(letter_id)
-
-    update_data = request.model_dump(exclude_unset=True)
-    for key, value in update_data.items():
-        if value is not None:
-            if key == "school_info" and value:
-                letter_data[key] = value if isinstance(value, dict) else value.model_dump()
-            elif key == "legal_references" and value:
-                letter_data[key] = [ref if isinstance(ref, dict) else ref.model_dump() for ref in value]
-            else:
-                letter_data[key] = value
-
-    _save_letter(letter_data)
-    return LetterResponse(**letter_data)
-
-
-@router.delete("/{letter_id}")
-async def delete_letter(letter_id: str):
-    """Loescht einen Brief."""
-    logger.info(f"Deleting letter: {letter_id}")
-    if letter_id not in _letters_store:
-        raise HTTPException(status_code=404, detail=f"Brief mit ID {letter_id} nicht gefunden")
-    del _letters_store[letter_id]
-    return {"message": f"Brief {letter_id} wurde geloescht"}
-
-
-@router.post("/export-pdf")
-async def export_letter_pdf(request: ExportPDFRequest):
-    """Exportiert einen Brief als PDF."""
-    logger.info("Exporting letter as PDF")
-
-    if request.letter_id:
-        letter_data = _get_letter(request.letter_id)
-    elif request.letter_data:
-        letter_data = request.letter_data.model_dump()
-    else:
-        raise HTTPException(status_code=400, detail="Entweder letter_id oder letter_data muss angegeben werden")
-
-    if "date" not in letter_data:
-        letter_data["date"] = datetime.now().strftime("%d.%m.%Y")
-
-    try:
-        pdf_bytes = generate_letter_pdf(letter_data)
-    except Exception as e:
-        logger.error(f"Error generating PDF: {e}")
-        raise HTTPException(status_code=500, detail=f"Fehler bei PDF-Generierung: {str(e)}")
-
-    student_name = letter_data.get("student_name", "Brief").replace(" ", "_")
-    date_str = datetime.now().strftime("%Y%m%d")
-    filename = f"Elternbrief_{student_name}_{date_str}.pdf"
-
-    return Response(
-        content=pdf_bytes, media_type="application/pdf",
-        headers={"Content-Disposition": f"attachment; filename={filename}", "Content-Length": str(len(pdf_bytes))}
-    )
-
-
-@router.post("/{letter_id}/export-pdf")
-async def export_saved_letter_pdf(letter_id: str):
-    """Exportiert einen gespeicherten Brief als PDF (Kurzform)."""
-    return await export_letter_pdf(ExportPDFRequest(letter_id=letter_id))
-
-
-@router.post("/improve", response_model=ImproveResponse)
-async def improve_letter_content(request: ImproveRequest):
-    """Verbessert den Briefinhalt nach GFK-Prinzipien."""
-    logger.info("Improving letter content with GFK principles")
-
-    comm_service_url = os.getenv("COMMUNICATION_SERVICE_URL", "http://localhost:8000/v1/communication")
-
-    try:
-        async with httpx.AsyncClient() as client:
-            validate_response = await client.post(
-                f"{comm_service_url}/validate",
-                json={"text": request.content}, timeout=30.0
-            )
-
-            if validate_response.status_code != 200:
-                logger.warning(f"Validation service returned {validate_response.status_code}")
-                return ImproveResponse(
-                    improved_content=request.content,
-                    changes=["Verbesserungsservice nicht verfuegbar"],
-                    gfk_score=0.5, gfk_principles_applied=[]
-                )
-
-            validation_data = validate_response.json()
-
-            if validation_data.get("is_valid", False) and validation_data.get("gfk_score", 0) > 0.8:
-                return ImproveResponse(
-                    improved_content=request.content,
-                    changes=["Text entspricht bereits GFK-Standards"],
-                    gfk_score=validation_data.get("gfk_score", 0.8),
-                    gfk_principles_applied=validation_data.get("positive_elements", [])
-                )
-
-            return ImproveResponse(
-                improved_content=request.content,
-                changes=validation_data.get("suggestions", []),
-                gfk_score=validation_data.get("gfk_score", 0.5),
-                gfk_principles_applied=validation_data.get("positive_elements", [])
-            )
-
-    except httpx.TimeoutException:
-        logger.error("Timeout while calling communication service")
-        return ImproveResponse(
-            improved_content=request.content,
-            changes=["Zeitueberschreitung beim Verbesserungsservice"],
-            gfk_score=0.5, gfk_principles_applied=[]
-        )
-    except Exception as e:
-        logger.error(f"Error improving content: {e}")
-        return ImproveResponse(
-            improved_content=request.content,
-            changes=[f"Fehler: {str(e)}"],
-            gfk_score=0.5, gfk_principles_applied=[]
-        )
-
-
-@router.post("/{letter_id}/send", response_model=SendEmailResponse)
-async def send_letter_email(letter_id: str, request: SendEmailRequest):
-    """Versendet einen Brief per Email."""
-    logger.info(f"Sending letter {letter_id} to {request.recipient_email}")
-    letter_data = _get_letter(letter_id)
-
-    try:
-        pdf_attachment = None
-        if request.include_pdf:
-            letter_data["date"] = datetime.now().strftime("%d.%m.%Y")
-            pdf_bytes = generate_letter_pdf(letter_data)
-            pdf_attachment = {
-                "filename": f"Elternbrief_{letter_data.get('student_name', 'Brief').replace(' ', '_')}.pdf",
-                "content": pdf_bytes.hex(),
-                "content_type": "application/pdf"
-            }
-
-        async with httpx.AsyncClient() as client:
-            logger.info(f"Would send email: {letter_data.get('subject')} to {request.recipient_email}")
-        letter_data["status"] = LetterStatus.SENT
-        letter_data["sent_at"] = datetime.now()
-        _save_letter(letter_data)
-
-        return SendEmailResponse(
-            success=True,
-            message=f"Brief wurde an {request.recipient_email} gesendet",
-            sent_at=datetime.now()
-        )
-
-    except Exception as e:
-        logger.error(f"Error sending email: {e}")
-        return SendEmailResponse(success=False, message=f"Fehler beim Versenden: {str(e)}", sent_at=None)
-
-
-@router.get("/student/{student_id}", response_model=LetterListResponse)
-async def get_letters_for_student(
-    student_id: str,
-    page: int = Query(1, ge=1),
-    page_size: int = Query(20, ge=1, le=100)
-):
-    """Laedt alle Briefe fuer einen bestimmten Schueler."""
-    logger.info(f"Getting letters for student: {student_id}")
-
-    filtered_letters = [
-        l for l in _letters_store.values()
-        if student_id.lower() in l.get("student_name", "").lower()
-    ]
-
-    filtered_letters.sort(key=lambda x: x.get("created_at", datetime.min), reverse=True)
-    total = len(filtered_letters)
-    start = (page - 1) * page_size
-    paginated_letters = filtered_letters[start:start + page_size]
-
-    return LetterListResponse(
-        letters=[LetterResponse(**l) for l in paginated_letters],
-        total=total, page=page, page_size=page_size
-    )
+# Backward-compat shim -- module moved to letters/api.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("letters.api")
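The shim works by swapping the module object in sys.modules while the old module executes: after the assignment, the import machinery re-reads sys.modules["letters_api"] and binds the importer's name to the relocated module, so existing `import letters_api` statements keep working. A minimal sketch of the effect (the assertions are illustrative):

# Old and new import paths resolve to the same module object.
import sys

import letters_api   # runs the shim, which replaces itself
import letters.api

assert sys.modules["letters_api"] is letters.api
assert letters_api.router is letters.api.router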
@@ -1,195 +1,4 @@
+# Backward-compat shim -- module moved to letters/models.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("letters.models")
@@ -0,0 +1 @@
# messenger — Contacts, conversations, messages, groups.
@@ -0,0 +1,21 @@
"""
BreakPilot Messenger API — Barrel Re-export.

Stellt Endpoints fuer Kontakte, Konversationen, Nachrichten,
CSV-Import, Gruppenmanagement und Templates bereit.

Split into:
- models.py: Pydantic models
- helpers.py: JSON file storage & default templates
- contacts.py: Contact CRUD & CSV import/export
- conversations.py: Conversations, messages, groups, templates, stats
"""

from fastapi import APIRouter

from .contacts import router as _contacts_router
from .conversations import router as _conversations_router

router = APIRouter(prefix="/api/messenger", tags=["Messenger"])
router.include_router(_contacts_router)
router.include_router(_conversations_router)
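With the barrel in place, consumers only need the package import. Wiring it into the main application would look roughly like this (the entry-point module and app object are assumptions, not shown in this commit):

# Hypothetical FastAPI entry point.
from fastapi import FastAPI
from messenger import router as messenger_router

app = FastAPI()
app.include_router(messenger_router)  # serves /api/messenger/contacts, /api/messenger/conversations, ...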
@@ -0,0 +1,251 @@
"""
Messenger API - Contact Routes.

CRUD, CSV import/export for contacts.
"""

import csv
import uuid
from io import StringIO
from datetime import datetime
from typing import List, Optional

from fastapi import APIRouter, HTTPException, UploadFile, File, Query
from fastapi.responses import StreamingResponse

from .models import (
    Contact,
    ContactCreate,
    ContactUpdate,
    CSVImportResult,
)
from .helpers import get_contacts, save_contacts

router = APIRouter(tags=["Messenger"])


# ==========================================
# CONTACTS ENDPOINTS
# ==========================================

@router.get("/contacts", response_model=List[Contact])
async def list_contacts(
    role: Optional[str] = Query(None, description="Filter by role"),
    class_name: Optional[str] = Query(None, description="Filter by class"),
    search: Optional[str] = Query(None, description="Search in name/email")
):
    """Listet alle Kontakte auf."""
    contacts = get_contacts()

    # Filter anwenden
    if role:
        contacts = [c for c in contacts if c.get("role") == role]
    if class_name:
        contacts = [c for c in contacts if c.get("class_name") == class_name]
    if search:
        search_lower = search.lower()
        contacts = [c for c in contacts if
                    search_lower in c.get("name", "").lower() or
                    search_lower in (c.get("email") or "").lower() or
                    search_lower in (c.get("student_name") or "").lower()]

    return contacts


@router.post("/contacts", response_model=Contact)
async def create_contact(contact: ContactCreate):
    """Erstellt einen neuen Kontakt."""
    contacts = get_contacts()

    # Pruefen ob Email bereits existiert
    if contact.email:
        existing = [c for c in contacts if c.get("email") == contact.email]
        if existing:
            raise HTTPException(status_code=400, detail="Kontakt mit dieser Email existiert bereits")

    now = datetime.utcnow().isoformat()
    new_contact = {
        "id": str(uuid.uuid4()),
        "created_at": now,
        "updated_at": now,
        "online": False,
        "last_seen": None,
        **contact.dict()
    }

    contacts.append(new_contact)
    save_contacts(contacts)

    return new_contact


@router.get("/contacts/{contact_id}", response_model=Contact)
async def get_contact(contact_id: str):
    """Ruft einen einzelnen Kontakt ab."""
    contacts = get_contacts()
    contact = next((c for c in contacts if c["id"] == contact_id), None)

    if not contact:
        raise HTTPException(status_code=404, detail="Kontakt nicht gefunden")

    return contact


@router.put("/contacts/{contact_id}", response_model=Contact)
async def update_contact(contact_id: str, update: ContactUpdate):
    """Aktualisiert einen Kontakt."""
    contacts = get_contacts()
    contact_idx = next((i for i, c in enumerate(contacts) if c["id"] == contact_id), None)

    if contact_idx is None:
        raise HTTPException(status_code=404, detail="Kontakt nicht gefunden")

    update_data = update.dict(exclude_unset=True)
    contacts[contact_idx].update(update_data)
    contacts[contact_idx]["updated_at"] = datetime.utcnow().isoformat()

    save_contacts(contacts)
    return contacts[contact_idx]


@router.delete("/contacts/{contact_id}")
async def delete_contact(contact_id: str):
    """Loescht einen Kontakt."""
    contacts = get_contacts()
    contacts = [c for c in contacts if c["id"] != contact_id]
    save_contacts(contacts)

    return {"status": "deleted", "id": contact_id}


@router.post("/contacts/import", response_model=CSVImportResult)
async def import_contacts_csv(file: UploadFile = File(...)):
    """
    Importiert Kontakte aus einer CSV-Datei.

    Erwartete Spalten:
    - name (required)
    - email
    - phone
    - role (parent/teacher/staff/student)
    - student_name
    - class_name
    - notes
    - tags (komma-separiert)
    """
    if not file.filename.endswith('.csv'):
        raise HTTPException(status_code=400, detail="Nur CSV-Dateien werden unterstuetzt")

    content = await file.read()
    try:
        text = content.decode('utf-8')
    except UnicodeDecodeError:
        text = content.decode('latin-1')

    contacts = get_contacts()
    existing_emails = {c.get("email") for c in contacts if c.get("email")}

    imported = []
    skipped = 0
    errors = []

    reader = csv.DictReader(StringIO(text), delimiter=';')  # Deutsche CSV meist mit Semikolon
    if not reader.fieldnames or 'name' not in [f.lower() for f in reader.fieldnames]:
        # Versuche mit Komma
        reader = csv.DictReader(StringIO(text), delimiter=',')

    for row_num, row in enumerate(reader, start=2):
        try:
            # Normalisiere Spaltennamen
            row = {k.lower().strip(): v.strip() if v else "" for k, v in row.items()}

            name = row.get('name') or row.get('kontakt') or row.get('elternname')
            if not name:
                errors.append(f"Zeile {row_num}: Name fehlt")
                skipped += 1
                continue

            email = row.get('email') or row.get('e-mail') or row.get('mail')
            if email and email in existing_emails:
                errors.append(f"Zeile {row_num}: Email {email} existiert bereits")
                skipped += 1
                continue

            now = datetime.utcnow().isoformat()
            tags_str = row.get('tags') or row.get('kategorien') or ""
            tags = [t.strip() for t in tags_str.split(',') if t.strip()]

            # Matrix-ID und preferred_channel auslesen
            matrix_id = row.get('matrix_id') or row.get('matrix') or None
            preferred_channel = row.get('preferred_channel') or row.get('kanal') or "email"
            if preferred_channel not in ["email", "matrix", "pwa"]:
                preferred_channel = "email"

            new_contact = {
                "id": str(uuid.uuid4()),
                "name": name,
                "email": email if email else None,
                "phone": row.get('phone') or row.get('telefon') or row.get('tel'),
                "role": row.get('role') or row.get('rolle') or "parent",
                "student_name": row.get('student_name') or row.get('schueler') or row.get('kind'),
                "class_name": row.get('class_name') or row.get('klasse'),
                "notes": row.get('notes') or row.get('notizen') or row.get('bemerkungen'),
                "tags": tags,
                "matrix_id": matrix_id if matrix_id else None,
                "preferred_channel": preferred_channel,
                "created_at": now,
                "updated_at": now,
                "online": False,
                "last_seen": None
            }

            contacts.append(new_contact)
            imported.append(new_contact)
            if email:
                existing_emails.add(email)

        except Exception as e:
            errors.append(f"Zeile {row_num}: {str(e)}")
            skipped += 1

    save_contacts(contacts)

    return CSVImportResult(
        imported=len(imported),
        skipped=skipped,
        errors=errors[:20],  # Maximal 20 Fehler zurueckgeben
        contacts=imported
    )


@router.get("/contacts/export/csv")
async def export_contacts_csv():
    """Exportiert alle Kontakte als CSV."""
    contacts = get_contacts()

    output = StringIO()
    fieldnames = ['name', 'email', 'phone', 'role', 'student_name', 'class_name', 'notes', 'tags', 'matrix_id', 'preferred_channel']
    writer = csv.DictWriter(output, fieldnames=fieldnames, delimiter=';')
    writer.writeheader()

    for contact in contacts:
        writer.writerow({
            'name': contact.get('name', ''),
            'email': contact.get('email', ''),
            'phone': contact.get('phone', ''),
            'role': contact.get('role', ''),
            'student_name': contact.get('student_name', ''),
            'class_name': contact.get('class_name', ''),
            'notes': contact.get('notes', ''),
            'tags': ','.join(contact.get('tags', [])),
            'matrix_id': contact.get('matrix_id', ''),
            'preferred_channel': contact.get('preferred_channel', 'email')
        })

    output.seek(0)

    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": "attachment; filename=kontakte.csv"}
    )
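The importer accepts German column aliases (rolle, kind, klasse, ...) and falls back from semicolon to comma delimiters. A sketch of an upload against a local instance (file contents, host, and port are illustrative):

# Hypothetical CSV import via httpx.
import httpx

csv_data = (
    "name;email;rolle;kind;klasse;tags\n"
    "Erika Mustermann;erika@example.com;parent;Max Mustermann;9b;elternbeirat\n"
).encode("utf-8")

resp = httpx.post(
    "http://localhost:8000/api/messenger/contacts/import",
    files={"file": ("kontakte.csv", csv_data, "text/csv")},
)
print(resp.json()["imported"])  # -> 1 on a fresh contact store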
@@ -0,0 +1,405 @@
"""
Messenger API - Conversation, Message, Group, Template & Stats Routes.

Conversations CRUD, message send/read, groups, templates, stats.
"""

import uuid
from datetime import datetime
from typing import List, Optional

from fastapi import APIRouter, HTTPException, Query

from .models import (
    Conversation,
    Group,
    GroupCreate,
    Message,
    MessageBase,
)
from .helpers import (
    DATA_DIR,
    DEFAULT_TEMPLATES,
    get_contacts,
    get_conversations,
    save_conversations,
    get_messages,
    save_messages,
    get_groups,
    save_groups,
    load_json,
    save_json,
)

router = APIRouter(tags=["Messenger"])


# ==========================================
# GROUPS ENDPOINTS
# ==========================================

@router.get("/groups", response_model=List[Group])
async def list_groups():
    """Listet alle Gruppen auf."""
    return get_groups()


@router.post("/groups", response_model=Group)
async def create_group(group: GroupCreate):
    """Erstellt eine neue Gruppe."""
    groups = get_groups()

    now = datetime.utcnow().isoformat()
    new_group = {
        "id": str(uuid.uuid4()),
        "created_at": now,
        "updated_at": now,
        **group.dict()
    }

    groups.append(new_group)
    save_groups(groups)

    return new_group


@router.put("/groups/{group_id}/members")
async def update_group_members(group_id: str, member_ids: List[str]):
    """Aktualisiert die Mitglieder einer Gruppe."""
    groups = get_groups()
    group_idx = next((i for i, g in enumerate(groups) if g["id"] == group_id), None)

    if group_idx is None:
        raise HTTPException(status_code=404, detail="Gruppe nicht gefunden")

    groups[group_idx]["member_ids"] = member_ids
    groups[group_idx]["updated_at"] = datetime.utcnow().isoformat()

    save_groups(groups)
    return groups[group_idx]


@router.delete("/groups/{group_id}")
async def delete_group(group_id: str):
    """Loescht eine Gruppe."""
    groups = get_groups()
    groups = [g for g in groups if g["id"] != group_id]
    save_groups(groups)

    return {"status": "deleted", "id": group_id}


# ==========================================
# CONVERSATIONS ENDPOINTS
# ==========================================

@router.get("/conversations", response_model=List[Conversation])
async def list_conversations():
    """Listet alle Konversationen auf."""
    conversations = get_conversations()
    messages = get_messages()

    # Unread count und letzte Nachricht hinzufuegen
    for conv in conversations:
        conv_messages = [m for m in messages if m.get("conversation_id") == conv["id"]]
        conv["unread_count"] = len([m for m in conv_messages if not m.get("read") and m.get("sender_id") != "self"])

        if conv_messages:
            last_msg = max(conv_messages, key=lambda m: m.get("timestamp", ""))
            conv["last_message"] = last_msg.get("content", "")[:50]
            conv["last_message_time"] = last_msg.get("timestamp")

    # Nach letzter Nachricht sortieren
    conversations.sort(key=lambda c: c.get("last_message_time") or "", reverse=True)

    return conversations


@router.post("/conversations", response_model=Conversation)
async def create_conversation(contact_id: Optional[str] = None, group_id: Optional[str] = None):
    """
    Erstellt eine neue Konversation.
    Entweder mit einem Kontakt (1:1) oder einer Gruppe.
    """
    conversations = get_conversations()

    if not contact_id and not group_id:
        raise HTTPException(status_code=400, detail="Entweder contact_id oder group_id erforderlich")

    # Pruefen ob Konversation bereits existiert
    if contact_id:
        existing = next((c for c in conversations
                         if not c.get("is_group") and contact_id in c.get("participant_ids", [])), None)
        if existing:
            return existing

    now = datetime.utcnow().isoformat()

    if group_id:
        groups = get_groups()
        group = next((g for g in groups if g["id"] == group_id), None)
        if not group:
            raise HTTPException(status_code=404, detail="Gruppe nicht gefunden")

        new_conv = {
            "id": str(uuid.uuid4()),
            "name": group.get("name"),
            "is_group": True,
            "participant_ids": group.get("member_ids", []),
            "group_id": group_id,
            "created_at": now,
            "updated_at": now,
            "last_message": None,
            "last_message_time": None,
            "unread_count": 0
        }
    else:
        contacts = get_contacts()
        contact = next((c for c in contacts if c["id"] == contact_id), None)
        if not contact:
            raise HTTPException(status_code=404, detail="Kontakt nicht gefunden")

        new_conv = {
            "id": str(uuid.uuid4()),
            "name": contact.get("name"),
            "is_group": False,
            "participant_ids": [contact_id],
            "group_id": None,
            "created_at": now,
            "updated_at": now,
            "last_message": None,
            "last_message_time": None,
            "unread_count": 0
        }

    conversations.append(new_conv)
    save_conversations(conversations)

    return new_conv
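Note that contact_id and group_id arrive as query parameters here, and 1:1 conversations are deduplicated: posting twice for the same contact returns the existing record. An illustrative round trip (URL and contact id are placeholders; the contact is assumed to exist):

# Hypothetical dedupe check against a local instance.
import httpx

base = "http://localhost:8000/api/messenger/conversations"
first = httpx.post(base, params={"contact_id": "abc-123"}).json()
second = httpx.post(base, params={"contact_id": "abc-123"}).json()
assert first["id"] == second["id"]  # the existing conversation is reused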
@router.get("/conversations/{conversation_id}", response_model=Conversation)
async def get_conversation(conversation_id: str):
    """Ruft eine Konversation ab."""
    conversations = get_conversations()
    conv = next((c for c in conversations if c["id"] == conversation_id), None)

    if not conv:
        raise HTTPException(status_code=404, detail="Konversation nicht gefunden")

    return conv


@router.delete("/conversations/{conversation_id}")
async def delete_conversation(conversation_id: str):
    """Loescht eine Konversation und alle zugehoerigen Nachrichten."""
    conversations = get_conversations()
    conversations = [c for c in conversations if c["id"] != conversation_id]
    save_conversations(conversations)

    messages = get_messages()
    messages = [m for m in messages if m.get("conversation_id") != conversation_id]
    save_messages(messages)

    return {"status": "deleted", "id": conversation_id}


# ==========================================
# MESSAGES ENDPOINTS
# ==========================================

@router.get("/conversations/{conversation_id}/messages", response_model=List[Message])
async def list_messages(
    conversation_id: str,
    limit: int = Query(50, ge=1, le=200),
    before: Optional[str] = Query(None, description="Load messages before this timestamp")
):
    """Ruft Nachrichten einer Konversation ab."""
    messages = get_messages()
    conv_messages = [m for m in messages if m.get("conversation_id") == conversation_id]

    if before:
        conv_messages = [m for m in conv_messages if m.get("timestamp", "") < before]

    # Nach Zeit sortieren (neueste zuletzt)
    conv_messages.sort(key=lambda m: m.get("timestamp", ""))

    return conv_messages[-limit:]


@router.post("/conversations/{conversation_id}/messages", response_model=Message)
async def send_message(conversation_id: str, message: MessageBase):
    """
    Sendet eine Nachricht in einer Konversation.

    Wenn send_email=True und der Kontakt eine Email-Adresse hat,
    wird die Nachricht auch per Email versendet.
    """
    conversations = get_conversations()
    conv = next((c for c in conversations if c["id"] == conversation_id), None)

    if not conv:
        raise HTTPException(status_code=404, detail="Konversation nicht gefunden")

    now = datetime.utcnow().isoformat()

    new_message = {
        "id": str(uuid.uuid4()),
        "conversation_id": conversation_id,
        "sender_id": "self",
        "timestamp": now,
        "read": True,
        "read_at": now,
        "email_sent": False,
        "email_sent_at": None,
        "email_error": None,
        **message.dict()
    }

    # Email-Versand wenn gewuenscht
    if message.send_email and not conv.get("is_group"):
        # Kontakt laden
        participant_ids = conv.get("participant_ids", [])
        if participant_ids:
            contacts = get_contacts()
            contact = next((c for c in contacts if c["id"] == participant_ids[0]), None)

            if contact and contact.get("email"):
                try:
                    from email_service import email_service

                    result = email_service.send_messenger_notification(
                        to_email=contact["email"],
                        to_name=contact.get("name", ""),
                        sender_name="BreakPilot Lehrer",
                        message_content=message.content
                    )

                    if result.success:
                        new_message["email_sent"] = True
                        new_message["email_sent_at"] = result.sent_at
                    else:
                        new_message["email_error"] = result.error

                except Exception as e:
                    new_message["email_error"] = str(e)

    messages = get_messages()
    messages.append(new_message)
    save_messages(messages)

    # Konversation aktualisieren
    conv_idx = next(i for i, c in enumerate(conversations) if c["id"] == conversation_id)
    conversations[conv_idx]["last_message"] = message.content[:50]
    conversations[conv_idx]["last_message_time"] = now
    conversations[conv_idx]["updated_at"] = now
    save_conversations(conversations)

    return new_message
||||||
|
|
||||||
|
|
||||||
|
@router.put("/messages/{message_id}/read")
|
||||||
|
async def mark_message_read(message_id: str):
|
||||||
|
"""Markiert eine Nachricht als gelesen."""
|
||||||
|
messages = get_messages()
|
||||||
|
msg_idx = next((i for i, m in enumerate(messages) if m["id"] == message_id), None)
|
||||||
|
|
||||||
|
if msg_idx is None:
|
||||||
|
raise HTTPException(status_code=404, detail="Nachricht nicht gefunden")
|
||||||
|
|
||||||
|
messages[msg_idx]["read"] = True
|
||||||
|
messages[msg_idx]["read_at"] = datetime.utcnow().isoformat()
|
||||||
|
save_messages(messages)
|
||||||
|
|
||||||
|
return {"status": "read", "id": message_id}
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/conversations/{conversation_id}/read-all")
|
||||||
|
async def mark_all_messages_read(conversation_id: str):
|
||||||
|
"""Markiert alle Nachrichten einer Konversation als gelesen."""
|
||||||
|
messages = get_messages()
|
||||||
|
now = datetime.utcnow().isoformat()
|
||||||
|
|
||||||
|
for msg in messages:
|
||||||
|
if msg.get("conversation_id") == conversation_id and not msg.get("read"):
|
||||||
|
msg["read"] = True
|
||||||
|
msg["read_at"] = now
|
||||||
|
|
||||||
|
save_messages(messages)
|
||||||
|
|
||||||
|
return {"status": "all_read", "conversation_id": conversation_id}
|
||||||
|
|
||||||
|
|
||||||
|
# ==========================================
|
||||||
|
# TEMPLATES ENDPOINTS
|
||||||
|
# ==========================================
|
||||||
|
|
||||||
|
@router.get("/templates")
|
||||||
|
async def list_templates():
|
||||||
|
"""Listet alle Nachrichtenvorlagen auf."""
|
||||||
|
templates_file = DATA_DIR / "templates.json"
|
||||||
|
if templates_file.exists():
|
||||||
|
templates = load_json(templates_file)
|
||||||
|
else:
|
||||||
|
templates = DEFAULT_TEMPLATES
|
||||||
|
save_json(templates_file, templates)
|
||||||
|
|
||||||
|
return templates
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/templates")
|
||||||
|
async def create_template(name: str, content: str, category: str = "custom"):
|
||||||
|
"""Erstellt eine neue Vorlage."""
|
||||||
|
templates_file = DATA_DIR / "templates.json"
|
||||||
|
templates = load_json(templates_file) if templates_file.exists() else DEFAULT_TEMPLATES.copy()
|
||||||
|
|
||||||
|
new_template = {
|
||||||
|
"id": str(uuid.uuid4()),
|
||||||
|
"name": name,
|
||||||
|
"content": content,
|
||||||
|
"category": category
|
||||||
|
}
|
||||||
|
|
||||||
|
templates.append(new_template)
|
||||||
|
save_json(templates_file, templates)
|
||||||
|
|
||||||
|
return new_template
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/templates/{template_id}")
|
||||||
|
async def delete_template(template_id: str):
|
||||||
|
"""Loescht eine Vorlage."""
|
||||||
|
templates_file = DATA_DIR / "templates.json"
|
||||||
|
templates = load_json(templates_file) if templates_file.exists() else DEFAULT_TEMPLATES.copy()
|
||||||
|
|
||||||
|
templates = [t for t in templates if t["id"] != template_id]
|
||||||
|
save_json(templates_file, templates)
|
||||||
|
|
||||||
|
return {"status": "deleted", "id": template_id}
|
||||||
|
|
||||||
|
|
||||||
|
# ==========================================
|
||||||
|
# STATS ENDPOINT
|
||||||
|
# ==========================================
|
||||||
|
|
||||||
|
@router.get("/stats")
|
||||||
|
async def get_messenger_stats():
|
||||||
|
"""Gibt Statistiken zum Messenger zurueck."""
|
||||||
|
contacts = get_contacts()
|
||||||
|
conversations = get_conversations()
|
||||||
|
messages = get_messages()
|
||||||
|
groups = get_groups()
|
||||||
|
|
||||||
|
unread_total = sum(1 for m in messages if not m.get("read") and m.get("sender_id") != "self")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total_contacts": len(contacts),
|
||||||
|
"total_groups": len(groups),
|
||||||
|
"total_conversations": len(conversations),
|
||||||
|
"total_messages": len(messages),
|
||||||
|
"unread_messages": unread_total,
|
||||||
|
"contacts_by_role": {
|
||||||
|
role: len([c for c in contacts if c.get("role") == role])
|
||||||
|
for role in set(c.get("role", "parent") for c in contacts)
|
||||||
|
}
|
||||||
|
}
|
||||||
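For orientation, a minimal client sketch of the flow above: create a 1:1 conversation, then send a message with the email fallback enabled. The host, port, and the /api/messenger mount point are assumptions for illustration (the prefix appears on the barrel router in this commit), not part of this hunk:

# Hypothetical usage sketch; host, port, and contact id are made up.
import httpx

BASE = "http://localhost:8000/api/messenger"  # assumed mount point

with httpx.Client() as client:
    # contact_id is a query parameter on POST /conversations.
    conv = client.post(f"{BASE}/conversations", params={"contact_id": "abc-123"}).json()

    # send_email=True triggers the email branch when the contact has an address.
    sent = client.post(
        f"{BASE}/conversations/{conv['id']}/messages",
        json={"content": "Der Elternabend beginnt um 19 Uhr.", "send_email": True},
    ).json()
    print(sent["email_sent"], sent["email_error"])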
@@ -0,0 +1,105 @@
"""
Messenger API - Data Helpers.

JSON-based file storage for contacts, conversations, messages, and groups.
"""

import json
from typing import List, Dict
from pathlib import Path

# Data storage (JSON-based for simple persistence)
DATA_DIR = Path(__file__).parent / "data" / "messenger"
DATA_DIR.mkdir(parents=True, exist_ok=True)

CONTACTS_FILE = DATA_DIR / "contacts.json"
CONVERSATIONS_FILE = DATA_DIR / "conversations.json"
MESSAGES_FILE = DATA_DIR / "messages.json"
GROUPS_FILE = DATA_DIR / "groups.json"


def load_json(filepath: Path) -> List[Dict]:
    """Loads JSON data from a file."""
    if not filepath.exists():
        return []
    try:
        with open(filepath, "r", encoding="utf-8") as f:
            return json.load(f)
    except Exception:
        return []


def save_json(filepath: Path, data: List[Dict]):
    """Saves data to a JSON file."""
    with open(filepath, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=False, indent=2)


def get_contacts() -> List[Dict]:
    return load_json(CONTACTS_FILE)


def save_contacts(contacts: List[Dict]):
    save_json(CONTACTS_FILE, contacts)


def get_conversations() -> List[Dict]:
    return load_json(CONVERSATIONS_FILE)


def save_conversations(conversations: List[Dict]):
    save_json(CONVERSATIONS_FILE, conversations)


def get_messages() -> List[Dict]:
    return load_json(MESSAGES_FILE)


def save_messages(messages: List[Dict]):
    save_json(MESSAGES_FILE, messages)


def get_groups() -> List[Dict]:
    return load_json(GROUPS_FILE)


def save_groups(groups: List[Dict]):
    save_json(GROUPS_FILE, groups)


# ==========================================
# DEFAULT TEMPLATES
# ==========================================

DEFAULT_TEMPLATES = [
    {
        "id": "1",
        "name": "Terminbestaetigung",
        "content": "Vielen Dank fuer Ihre Terminanfrage. Ich bestaetige den Termin am [DATUM] um [UHRZEIT]. Bitte geben Sie mir Bescheid, falls sich etwas aendern sollte.",
        "category": "termin"
    },
    {
        "id": "2",
        "name": "Hausaufgaben-Info",
        "content": "Zur Information: Die Hausaufgaben fuer diese Woche umfassen [THEMA]. Abgabetermin ist [DATUM]. Bei Fragen stehe ich gerne zur Verfuegung.",
        "category": "hausaufgaben"
    },
    {
        "id": "3",
        "name": "Entschuldigung bestaetigen",
        "content": "Ich bestaetige den Erhalt der Entschuldigung fuer [NAME] am [DATUM]. Die Fehlzeiten wurden entsprechend vermerkt.",
        "category": "entschuldigung"
    },
    {
        "id": "4",
        "name": "Gespraechsanfrage",
        "content": "Ich wuerde gerne einen Termin fuer ein Gespraech mit Ihnen vereinbaren, um [THEMA] zu besprechen. Waeren Sie am [DATUM] um [UHRZEIT] verfuegbar?",
        "category": "gespraech"
    },
    {
        "id": "5",
        "name": "Krankmeldung bestaetigen",
        "content": "Vielen Dank fuer Ihre Krankmeldung fuer [NAME]. Ich wuensche gute Besserung. Bitte reichen Sie eine schriftliche Entschuldigung nach, sobald Ihr Kind wieder gesund ist.",
        "category": "krankmeldung"
    }
]
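One design caveat in these helpers: save_json rewrites the file in place, so a crash mid-write can leave truncated JSON, which load_json then silently turns into an empty list. A sketch of an atomic variant (not part of this commit) that avoids that failure mode:

# Sketch only: atomic alternative to the in-place save_json above.
import json
import os
import tempfile
from pathlib import Path
from typing import Dict, List

def save_json_atomic(filepath: Path, data: List[Dict]) -> None:
    """Write to a temp file in the same directory, then atomically swap it in."""
    fd, tmp_path = tempfile.mkstemp(dir=filepath.parent, suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as f:
            json.dump(data, f, ensure_ascii=False, indent=2)
        os.replace(tmp_path, filepath)  # atomic rename on POSIX and Windows
    except BaseException:
        os.unlink(tmp_path)  # clean up the temp file on any failure
        raise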
@@ -0,0 +1,139 @@
"""
Messenger API - Pydantic Models.

Data models for contacts, conversations, messages, and groups.
"""

from typing import List, Optional

from pydantic import BaseModel, Field


# ==========================================
# CONTACT MODELS
# ==========================================

class ContactBase(BaseModel):
    """Base model for contacts."""
    name: str = Field(..., min_length=1, max_length=200)
    email: Optional[str] = None
    phone: Optional[str] = None
    role: str = Field(default="parent", description="parent, teacher, staff, student")
    student_name: Optional[str] = Field(None, description="Name of the associated student")
    class_name: Optional[str] = Field(None, description="Class, e.g. 10a")
    notes: Optional[str] = None
    tags: List[str] = Field(default_factory=list)
    matrix_id: Optional[str] = Field(None, description="Matrix ID, e.g. @user:matrix.org")
    preferred_channel: str = Field(default="email", description="email, matrix, pwa")


class ContactCreate(ContactBase):
    """Model for a new contact."""
    pass


class Contact(ContactBase):
    """Complete contact with ID."""
    id: str
    created_at: str
    updated_at: str
    online: bool = False
    last_seen: Optional[str] = None


class ContactUpdate(BaseModel):
    """Update model for contacts."""
    name: Optional[str] = None
    email: Optional[str] = None
    phone: Optional[str] = None
    role: Optional[str] = None
    student_name: Optional[str] = None
    class_name: Optional[str] = None
    notes: Optional[str] = None
    tags: Optional[List[str]] = None
    matrix_id: Optional[str] = None
    preferred_channel: Optional[str] = None


# ==========================================
# GROUP MODELS
# ==========================================

class GroupBase(BaseModel):
    """Base model for groups."""
    name: str = Field(..., min_length=1, max_length=100)
    description: Optional[str] = None
    group_type: str = Field(default="class", description="class, department, custom")


class GroupCreate(GroupBase):
    """Model for a new group."""
    member_ids: List[str] = Field(default_factory=list)


class Group(GroupBase):
    """Complete group with ID."""
    id: str
    member_ids: List[str] = []
    created_at: str
    updated_at: str


# ==========================================
# MESSAGE MODELS
# ==========================================

class MessageBase(BaseModel):
    """Base model for messages."""
    content: str = Field(..., min_length=1)
    content_type: str = Field(default="text", description="text, file, image")
    file_url: Optional[str] = None
    send_email: bool = Field(default=False, description="Also send the message via email")


class MessageCreate(MessageBase):
    """Model for a new message."""
    conversation_id: str


class Message(MessageBase):
    """Complete message with ID."""
    id: str
    conversation_id: str
    sender_id: str  # "self" for own messages
    timestamp: str
    read: bool = False
    read_at: Optional[str] = None
    email_sent: bool = False
    email_sent_at: Optional[str] = None
    email_error: Optional[str] = None


# ==========================================
# CONVERSATION MODELS
# ==========================================

class ConversationBase(BaseModel):
    """Base model for conversations."""
    name: Optional[str] = None
    is_group: bool = False


class Conversation(ConversationBase):
    """Complete conversation with ID."""
    id: str
    participant_ids: List[str] = []
    group_id: Optional[str] = None
    created_at: str
    updated_at: str
    last_message: Optional[str] = None
    last_message_time: Optional[str] = None
    unread_count: int = 0


class CSVImportResult(BaseModel):
    """Result of a CSV import."""
    imported: int
    skipped: int
    errors: List[str]
    contacts: List[Contact]
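The .dict() calls elsewhere in this diff suggest Pydantic v1; under that assumption, a short sketch of how these models reject bad input before any route code runs (the import path is the new package location from this commit):

# Sketch (Pydantic v1 semantics, inferred from the .dict() usage in this commit).
from pydantic import ValidationError

from messenger.models import ContactCreate

contact = ContactCreate(name="Erika Musterfrau", tags=["10a"])
print(contact.preferred_channel)  # "email" -- field default kicks in

try:
    ContactCreate(name="")  # violates min_length=1
except ValidationError as err:
    print(err.errors()[0]["loc"])  # ("name",)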
@@ -1,21 +1,4 @@
# Backward-compat shim -- module moved to messenger/api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("messenger.api")
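Worth spelling out why four lines suffice: Python's import machinery re-reads sys.modules after a module's top-level code finishes, so a module that replaces its own entry hands every importer the new package module instead. A small sketch of the effect, using the module names from this diff:

# Old and new import paths resolve to the same module object.
import messenger_api   # runs the shim, which swaps itself out of sys.modules
import messenger.api

assert messenger_api is messenger.api  # same object: routers and state are shared
from messenger_api import router       # attribute lookups also hit messenger.api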
@@ -1,251 +1,4 @@
# Backward-compat shim -- module moved to messenger/contacts.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("messenger.contacts")
@@ -1,405 +1,4 @@
# Backward-compat shim -- module moved to messenger/conversations.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("messenger.conversations")
@@ -1,105 +1,4 @@
# Backward-compat shim -- module moved to messenger/helpers.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("messenger.helpers")
@@ -1,139 +1,4 @@
# Backward-compat shim -- module moved to messenger/models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("messenger.models")
@@ -0,0 +1 @@
# recording — Meeting recordings, transcription, minutes.
@@ -0,0 +1,22 @@
"""
BreakPilot Recording API — Barrel Re-export.

Manages Jibri meeting recordings and their metadata.
Split into:
- recording_models.py: Pydantic models & config
- recording_helpers.py: In-memory storage & utilities
- recording_routes.py: Core recording CRUD routes
- recording_transcription.py: Transcription routes
- recording_minutes.py: Meeting minutes routes
"""

from fastapi import APIRouter

from .routes import router as _routes_router
from .transcription import router as _transcription_router
from .minutes import router as _minutes_router

router = APIRouter(prefix="/api/recordings", tags=["Recordings"])
router.include_router(_routes_router)
router.include_router(_transcription_router)
router.include_router(_minutes_router)
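A one-screen sketch of how such a barrel is consumed; the FastAPI app object is assumed for illustration and is not part of this hunk:

# Sketch: mounting the composed recording router in an application.
from fastapi import FastAPI

from recording import router as recording_router

app = FastAPI()
app.include_router(recording_router)  # all /api/recordings/* routes in one call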
@@ -0,0 +1,57 @@
"""
Recording API - In-Memory Storage & Helpers.

Shared state and utility functions for recording endpoints.
"""

import uuid
from datetime import datetime
from typing import Optional


# ==========================================
# IN-MEMORY STORAGE (Dev Mode)
# ==========================================
# In production, these would be database queries

_recordings_store: dict = {}
_transcriptions_store: dict = {}
_audit_log: list = []
_minutes_store: dict = {}


def log_audit(
    action: str,
    recording_id: Optional[str] = None,
    transcription_id: Optional[str] = None,
    user_id: Optional[str] = None,
    metadata: Optional[dict] = None
):
    """Log audit event for DSGVO compliance."""
    _audit_log.append({
        "id": str(uuid.uuid4()),
        "recording_id": recording_id,
        "transcription_id": transcription_id,
        "user_id": user_id,
        "action": action,
        "metadata": metadata or {},
        "created_at": datetime.utcnow().isoformat()
    })


def format_vtt_time(ms: int) -> str:
    """Format milliseconds to VTT timestamp (HH:MM:SS.mmm)."""
    hours = ms // 3600000
    minutes = (ms % 3600000) // 60000
    seconds = (ms % 60000) // 1000
    millis = ms % 1000
    return f"{hours:02d}:{minutes:02d}:{seconds:02d}.{millis:03d}"


def format_srt_time(ms: int) -> str:
    """Format milliseconds to SRT timestamp (HH:MM:SS,mmm)."""
    hours = ms // 3600000
    minutes = (ms % 3600000) // 60000
    seconds = (ms % 60000) // 1000
    millis = ms % 1000
    return f"{hours:02d}:{minutes:02d}:{seconds:02d},{millis:03d}"
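The two formatters are identical except for the millisecond separator, which is precisely the WebVTT-vs-SRT difference; a worked example:

# 1 h, 2 min, 3 s, 456 ms expressed as milliseconds:
ms = 1 * 3600000 + 2 * 60000 + 3 * 1000 + 456  # 3723456

print(format_vtt_time(ms))  # "01:02:03.456" -- WebVTT uses a dot
print(format_srt_time(ms))  # "01:02:03,456" -- SRT uses a comma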
@@ -0,0 +1,187 @@
"""
Recording API - Meeting Minutes Routes.

Generate, retrieve, and export AI-based meeting minutes.
"""

from datetime import datetime
from typing import Optional

from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import PlainTextResponse, HTMLResponse

from .helpers import (
    _recordings_store,
    _transcriptions_store,
    _minutes_store,
    log_audit,
)

router = APIRouter(tags=["Recordings"])


# ==========================================
# MEETING MINUTES ENDPOINTS
# ==========================================

@router.post("/{recording_id}/minutes")
async def generate_meeting_minutes(
    recording_id: str,
    title: Optional[str] = Query(None, description="Meeting title"),
    model: str = Query("breakpilot-teacher-8b", description="LLM model")
):
    """
    Generates AI-based meeting minutes from the transcription.

    Uses the LLM Gateway (Ollama/vLLM) for local processing.
    """
    from meeting_minutes_generator import get_minutes_generator, MeetingMinutes

    # Check recording exists
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    # Check transcription exists and is completed
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=400, detail="No transcription found. Please transcribe first.")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    # Check if minutes already exist
    existing = _minutes_store.get(recording_id)
    if existing and existing.get("status") == "completed":
        # Return existing minutes
        return existing

    # Get transcript text
    transcript_text = transcription.get("full_text", "")
    if not transcript_text:
        raise HTTPException(status_code=400, detail="Transcription has no text content")

    # Generate meeting minutes
    generator = get_minutes_generator()

    try:
        minutes = await generator.generate(
            transcript=transcript_text,
            recording_id=recording_id,
            transcription_id=transcription["id"],
            title=title,
            date=recording.get("recorded_at", "")[:10] if recording.get("recorded_at") else None,
            duration_minutes=recording.get("duration_seconds", 0) // 60 if recording.get("duration_seconds") else None,
            participant_count=recording.get("participant_count", 0),
            model=model
        )

        # Store minutes
        minutes_dict = minutes.model_dump()
        minutes_dict["generated_at"] = minutes.generated_at.isoformat()
        _minutes_store[recording_id] = minutes_dict

        # Log action
        log_audit(
            action="minutes_generated",
            recording_id=recording_id,
            metadata={"model": model, "generation_time": minutes.generation_time_seconds}
        )

        return minutes_dict

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Minutes generation failed: {str(e)}")


@router.get("/{recording_id}/minutes")
async def get_meeting_minutes(recording_id: str):
    """
    Retrieves generated meeting minutes.
    """
    minutes = _minutes_store.get(recording_id)
    if not minutes:
        raise HTTPException(status_code=404, detail="No meeting minutes found. Generate them first with POST.")

    return minutes


def _load_minutes(recording_id: str):
    """Load and convert stored minutes dict back to MeetingMinutes."""
    from meeting_minutes_generator import MeetingMinutes

    minutes_dict = _minutes_store.get(recording_id)
    if not minutes_dict:
        raise HTTPException(status_code=404, detail="No meeting minutes found")

    minutes_dict_copy = minutes_dict.copy()
    if isinstance(minutes_dict_copy.get("generated_at"), str):
        minutes_dict_copy["generated_at"] = datetime.fromisoformat(minutes_dict_copy["generated_at"])

    return MeetingMinutes(**minutes_dict_copy)


@router.get("/{recording_id}/minutes/markdown")
async def get_minutes_markdown(recording_id: str):
    """
    Exports meeting minutes as Markdown.
    """
    from meeting_minutes_generator import minutes_to_markdown

    minutes = _load_minutes(recording_id)
    markdown = minutes_to_markdown(minutes)

    return PlainTextResponse(
        content=markdown,
        media_type="text/markdown",
        headers={"Content-Disposition": f"attachment; filename=protokoll_{recording_id}.md"}
    )


@router.get("/{recording_id}/minutes/html")
async def get_minutes_html(recording_id: str):
    """
    Exports meeting minutes as HTML.
    """
    from meeting_minutes_generator import minutes_to_html

    minutes = _load_minutes(recording_id)
    html = minutes_to_html(minutes)

    return HTMLResponse(content=html)


@router.get("/{recording_id}/minutes/pdf")
async def get_minutes_pdf(recording_id: str):
    """
    Exports meeting minutes as PDF.

    Requires WeasyPrint (pip install weasyprint).
    """
    from meeting_minutes_generator import minutes_to_html

    minutes = _load_minutes(recording_id)
    html = minutes_to_html(minutes)

    try:
        from weasyprint import HTML
        from fastapi.responses import Response

        pdf_bytes = HTML(string=html).write_pdf()

        return Response(
            content=pdf_bytes,
            media_type="application/pdf",
            headers={"Content-Disposition": f"attachment; filename=protokoll_{recording_id}.pdf"}
        )
    except ImportError:
        raise HTTPException(
            status_code=501,
            detail="PDF export not available. Install weasyprint: pip install weasyprint"
        )
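One detail worth noting in the handlers above: minutes are cached as a plain dict with generated_at flattened to an ISO string, so _load_minutes has to parse it back before rebuilding the model. A minimal, standalone sketch of that round-trip (illustrative):

# Standalone sketch of the generated_at round-trip performed above.
from datetime import datetime

stored = {"title": "Team sync", "generated_at": datetime.utcnow().isoformat()}
loaded = stored.copy()
if isinstance(loaded.get("generated_at"), str):
    loaded["generated_at"] = datetime.fromisoformat(loaded["generated_at"])
assert isinstance(loaded["generated_at"], datetime)  # ready for MeetingMinutes(**loaded)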
@@ -0,0 +1,98 @@
"""
Recording API - Pydantic Models & Configuration.

Data models for recording, transcription, and webhook endpoints.
"""

import os
from datetime import datetime
from typing import Optional, List

from pydantic import BaseModel, Field


# ==========================================
# ENVIRONMENT CONFIGURATION
# ==========================================

MINIO_ENDPOINT = os.getenv("MINIO_ENDPOINT", "minio:9000")
MINIO_ACCESS_KEY = os.getenv("MINIO_ACCESS_KEY", "breakpilot")
MINIO_SECRET_KEY = os.getenv("MINIO_SECRET_KEY", "breakpilot123")
MINIO_BUCKET = os.getenv("MINIO_BUCKET", "breakpilot-recordings")
MINIO_SECURE = os.getenv("MINIO_SECURE", "false").lower() == "true"

# Default retention period in days (DSGVO compliance)
DEFAULT_RETENTION_DAYS = int(os.getenv("RECORDING_RETENTION_DAYS", "365"))


# ==========================================
# PYDANTIC MODELS
# ==========================================

class JibriWebhookPayload(BaseModel):
    """Webhook payload from Jibri finalize.sh script."""
    event: str = Field(..., description="Event type: recording_completed")
    recording_name: str = Field(..., description="Unique recording identifier")
    storage_path: str = Field(..., description="Path in MinIO bucket")
    audio_path: Optional[str] = Field(None, description="Extracted audio path")
    file_size_bytes: int = Field(..., description="Video file size in bytes")
    timestamp: str = Field(..., description="ISO timestamp of upload")


class RecordingCreate(BaseModel):
    """Manual recording creation (for testing)."""
    meeting_id: str
    title: Optional[str] = None
    storage_path: str
    audio_path: Optional[str] = None
    duration_seconds: Optional[int] = None
    participant_count: Optional[int] = 0
    retention_days: Optional[int] = DEFAULT_RETENTION_DAYS


class RecordingResponse(BaseModel):
    """Recording details response."""
    id: str
    meeting_id: str
    title: Optional[str]
    storage_path: str
    audio_path: Optional[str]
    file_size_bytes: Optional[int]
    duration_seconds: Optional[int]
    participant_count: int
    status: str
    recorded_at: datetime
    retention_days: int
    retention_expires_at: datetime
    transcription_status: Optional[str] = None
    transcription_id: Optional[str] = None


class RecordingListResponse(BaseModel):
    """Paginated list of recordings."""
    recordings: List[RecordingResponse]
    total: int
    page: int
    page_size: int


class TranscriptionRequest(BaseModel):
    """Request to start transcription."""
    language: str = Field(default="de", description="Language code: de, en, etc.")
    model: str = Field(default="large-v3", description="Whisper model to use")
    priority: int = Field(default=0, description="Queue priority (higher = sooner)")


class TranscriptionStatusResponse(BaseModel):
    """Transcription status and progress."""
    id: str
    recording_id: str
    status: str
    language: str
    model: str
    word_count: Optional[int]
    confidence_score: Optional[float]
    processing_duration_seconds: Optional[int]
    error_message: Optional[str]
    created_at: datetime
    completed_at: Optional[datetime]
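Since JibriWebhookPayload is the contract finalize.sh must produce, here is a minimal validation sketch (illustrative values; the recording name follows the meetingId_timestamp convention assumed by the webhook in the next file):

# Illustrative only: validate a finalize.sh-style payload against the model.
from recording.models import JibriWebhookPayload

payload = JibriWebhookPayload(
    event="recording_completed",
    recording_name="math101_20250101T1200",  # hypothetical meetingId_timestamp value
    storage_path="recordings/math101_20250101T1200.mp4",
    file_size_bytes=1_048_576,
    timestamp="2025-01-01T12:00:00Z",
)
print(payload.model_dump()["event"])  # recording_completed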
@@ -0,0 +1,307 @@
"""
Recording API - Core Recording Routes.

Webhook, CRUD, health, audit, and download endpoints.
"""

import uuid
from datetime import datetime, timedelta
from typing import Optional

from fastapi import APIRouter, HTTPException, Query, Request
from fastapi.responses import JSONResponse

from .models import (
    JibriWebhookPayload,
    RecordingResponse,
    RecordingListResponse,
    MINIO_ENDPOINT,
    MINIO_BUCKET,
    DEFAULT_RETENTION_DAYS,
)
from .helpers import (
    _recordings_store,
    _transcriptions_store,
    _audit_log,
    log_audit,
)

router = APIRouter(tags=["Recordings"])


# ==========================================
# WEBHOOK ENDPOINT (Jibri)
# ==========================================

@router.post("/webhook")
async def jibri_webhook(payload: JibriWebhookPayload, request: Request):
    """
    Webhook endpoint called by Jibri finalize.sh after upload.

    This creates a new recording entry and optionally triggers transcription.
    """
    if payload.event != "recording_completed":
        return JSONResponse(
            status_code=400,
            content={"error": f"Unknown event type: {payload.event}"}
        )

    # Extract meeting_id from recording_name (format: meetingId_timestamp)
    parts = payload.recording_name.split("_")
    meeting_id = parts[0] if parts else payload.recording_name

    # Create recording entry
    recording_id = str(uuid.uuid4())
    recorded_at = datetime.utcnow()

    recording = {
        "id": recording_id,
        "meeting_id": meeting_id,
        "jibri_session_id": payload.recording_name,
        "title": f"Recording {meeting_id}",
        "storage_path": payload.storage_path,
        "audio_path": payload.audio_path,
        "file_size_bytes": payload.file_size_bytes,
        "duration_seconds": None,  # Will be updated after analysis
        "participant_count": 0,
        "status": "uploaded",
        "recorded_at": recorded_at.isoformat(),
        "retention_days": DEFAULT_RETENTION_DAYS,
        "created_at": datetime.utcnow().isoformat(),
        "updated_at": datetime.utcnow().isoformat()
    }

    _recordings_store[recording_id] = recording

    # Log the creation
    log_audit(
        action="created",
        recording_id=recording_id,
        metadata={
            "source": "jibri_webhook",
            "storage_path": payload.storage_path,
            "file_size_bytes": payload.file_size_bytes
        }
    )

    return {
        "success": True,
        "recording_id": recording_id,
        "meeting_id": meeting_id,
        "status": "uploaded",
        "message": "Recording registered successfully"
    }


# ==========================================
# HEALTH & AUDIT ENDPOINTS (must be before parameterized routes)
# ==========================================

@router.get("/health")
async def recordings_health():
    """Health check for recording service."""
    return {
        "status": "healthy",
        "recordings_count": len(_recordings_store),
        "transcriptions_count": len(_transcriptions_store),
        "minio_endpoint": MINIO_ENDPOINT,
        "bucket": MINIO_BUCKET
    }


@router.get("/audit/log")
async def get_audit_log(
    recording_id: Optional[str] = Query(None),
    action: Optional[str] = Query(None),
    limit: int = Query(100, ge=1, le=1000)
):
    """
    Get audit log entries (DSGVO compliance).

    Admin-only endpoint for reviewing recording access history.
    """
    logs = _audit_log.copy()

    if recording_id:
        logs = [l for l in logs if l.get("recording_id") == recording_id]
    if action:
        logs = [l for l in logs if l.get("action") == action]

    # Sort by created_at descending
    logs.sort(key=lambda x: x["created_at"], reverse=True)

    return {
        "entries": logs[:limit],
        "total": len(logs)
    }


# ==========================================
# RECORDING MANAGEMENT ENDPOINTS
# ==========================================

@router.get("/", response_model=RecordingListResponse)
async def list_recordings(
    status: Optional[str] = Query(None, description="Filter by status"),
    meeting_id: Optional[str] = Query(None, description="Filter by meeting ID"),
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(20, ge=1, le=100, description="Items per page")
):
    """
    List all recordings with optional filtering.

    Supports pagination and filtering by status or meeting ID.
    """
    # Filter recordings
    recordings = list(_recordings_store.values())

    if status:
        recordings = [r for r in recordings if r["status"] == status]
    if meeting_id:
        recordings = [r for r in recordings if r["meeting_id"] == meeting_id]

    # Sort by recorded_at descending
    recordings.sort(key=lambda x: x["recorded_at"], reverse=True)

    # Paginate
    total = len(recordings)
    start = (page - 1) * page_size
    end = start + page_size
    page_recordings = recordings[start:end]

    # Convert to response format
    result = []
    for rec in page_recordings:
        recorded_at = datetime.fromisoformat(rec["recorded_at"])
        retention_expires = recorded_at + timedelta(days=rec["retention_days"])

        # Check for transcription
        trans = next(
            (t for t in _transcriptions_store.values() if t["recording_id"] == rec["id"]),
            None
        )

        result.append(RecordingResponse(
            id=rec["id"],
            meeting_id=rec["meeting_id"],
            title=rec.get("title"),
            storage_path=rec["storage_path"],
            audio_path=rec.get("audio_path"),
            file_size_bytes=rec.get("file_size_bytes"),
            duration_seconds=rec.get("duration_seconds"),
            participant_count=rec.get("participant_count", 0),
            status=rec["status"],
            recorded_at=recorded_at,
            retention_days=rec["retention_days"],
            retention_expires_at=retention_expires,
            transcription_status=trans["status"] if trans else None,
            transcription_id=trans["id"] if trans else None
        ))

    return RecordingListResponse(
        recordings=result,
        total=total,
        page=page,
        page_size=page_size
    )


@router.get("/{recording_id}", response_model=RecordingResponse)
async def get_recording(recording_id: str):
    """
    Get details for a specific recording.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    # Log view action
    log_audit(action="viewed", recording_id=recording_id)

    recorded_at = datetime.fromisoformat(recording["recorded_at"])
    retention_expires = recorded_at + timedelta(days=recording["retention_days"])

    # Check for transcription
    trans = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )

    return RecordingResponse(
        id=recording["id"],
        meeting_id=recording["meeting_id"],
        title=recording.get("title"),
        storage_path=recording["storage_path"],
        audio_path=recording.get("audio_path"),
        file_size_bytes=recording.get("file_size_bytes"),
        duration_seconds=recording.get("duration_seconds"),
        participant_count=recording.get("participant_count", 0),
        status=recording["status"],
        recorded_at=recorded_at,
        retention_days=recording["retention_days"],
        retention_expires_at=retention_expires,
        transcription_status=trans["status"] if trans else None,
        transcription_id=trans["id"] if trans else None
    )


@router.delete("/{recording_id}")
async def delete_recording(
    recording_id: str,
    reason: str = Query(..., description="Reason for deletion (DSGVO audit)")
):
    """
    Soft-delete a recording (DSGVO compliance).

    The recording is marked as deleted but retained for audit purposes.
    Actual file deletion happens after the audit retention period.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    # Soft delete
    recording["status"] = "deleted"
    recording["deleted_at"] = datetime.utcnow().isoformat()
    recording["updated_at"] = datetime.utcnow().isoformat()

    # Log deletion with reason
    log_audit(
        action="deleted",
        recording_id=recording_id,
        metadata={"reason": reason}
    )

    return {
        "success": True,
        "recording_id": recording_id,
        "status": "deleted",
        "message": "Recording marked for deletion"
    }


@router.get("/{recording_id}/download")
async def download_recording(recording_id: str):
    """
    Download the recording file.

    In production, this would generate a presigned URL to MinIO.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    if recording["status"] == "deleted":
        raise HTTPException(status_code=410, detail="Recording has been deleted")

    # Log download action
    log_audit(action="downloaded", recording_id=recording_id)

    # In production, generate presigned URL to MinIO
    # For now, return info about where the file is
    return {
        "recording_id": recording_id,
        "storage_path": recording["storage_path"],
        "file_size_bytes": recording.get("file_size_bytes"),
        "message": "In production, this would redirect to a presigned MinIO URL"
    }
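The "must be before parameterized routes" comment above is load-bearing: FastAPI matches routes in registration order, so /health and /audit/log have to be declared before /{recording_id} or they would be captured as recording IDs. A minimal smoke test of the webhook flow, as a sketch only (it assumes the recording.api barrel created by this commit and that the /api/recordings prefix from the old module carries over):

# Sketch only: exercises the webhook + detail routes via FastAPI's TestClient.
from fastapi import FastAPI
from fastapi.testclient import TestClient
from recording.api import router  # assumed barrel location after this commit

app = FastAPI()
app.include_router(router)
client = TestClient(app)

resp = client.post("/api/recordings/webhook", json={
    "event": "recording_completed",
    "recording_name": "math101_20250101T1200",  # hypothetical name
    "storage_path": "recordings/math101_20250101T1200.mp4",
    "file_size_bytes": 1048576,
    "timestamp": "2025-01-01T12:00:00Z",
})
assert resp.status_code == 200
rec_id = resp.json()["recording_id"]
assert client.get(f"/api/recordings/{rec_id}").json()["status"] == "uploaded"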
@@ -0,0 +1,250 @@
"""
Recording API - Transcription Routes.

Start transcription, get status, download VTT/SRT subtitle files.
"""

import uuid
from datetime import datetime
from typing import Optional

from fastapi import APIRouter, HTTPException
from fastapi.responses import PlainTextResponse

from .models import (
    TranscriptionRequest,
    TranscriptionStatusResponse,
)
from .helpers import (
    _recordings_store,
    _transcriptions_store,
    log_audit,
    format_vtt_time,
    format_srt_time,
)

router = APIRouter(tags=["Recordings"])


# ==========================================
# TRANSCRIPTION ENDPOINTS
# ==========================================

@router.post("/{recording_id}/transcribe", response_model=TranscriptionStatusResponse)
async def start_transcription(recording_id: str, request: TranscriptionRequest):
    """
    Start transcription for a recording.

    Queues the recording for processing by the transcription worker.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    if recording["status"] == "deleted":
        raise HTTPException(status_code=400, detail="Cannot transcribe deleted recording")

    # Check if transcription already exists
    existing = next(
        (t for t in _transcriptions_store.values()
         if t["recording_id"] == recording_id and t["status"] != "failed"),
        None
    )
    if existing:
        raise HTTPException(
            status_code=409,
            detail=f"Transcription already exists with status: {existing['status']}"
        )

    # Create transcription entry
    transcription_id = str(uuid.uuid4())
    now = datetime.utcnow()

    transcription = {
        "id": transcription_id,
        "recording_id": recording_id,
        "language": request.language,
        "model": request.model,
        "status": "pending",
        "full_text": None,
        "word_count": None,
        "confidence_score": None,
        "vtt_path": None,
        "srt_path": None,
        "json_path": None,
        "error_message": None,
        "processing_started_at": None,
        "processing_completed_at": None,
        "processing_duration_seconds": None,
        "created_at": now.isoformat(),
        "updated_at": now.isoformat()
    }

    _transcriptions_store[transcription_id] = transcription

    # Update recording status
    recording["status"] = "processing"
    recording["updated_at"] = now.isoformat()

    # Log transcription start
    log_audit(
        action="transcription_started",
        recording_id=recording_id,
        transcription_id=transcription_id,
        metadata={"language": request.language, "model": request.model}
    )

    # TODO: Queue job to Redis/Valkey for transcription worker

    return TranscriptionStatusResponse(
        id=transcription_id,
        recording_id=recording_id,
        status="pending",
        language=request.language,
        model=request.model,
        word_count=None,
        confidence_score=None,
        processing_duration_seconds=None,
        error_message=None,
        created_at=now,
        completed_at=None
    )


@router.get("/{recording_id}/transcription", response_model=TranscriptionStatusResponse)
async def get_transcription_status(recording_id: str):
    """
    Get transcription status for a recording.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    return TranscriptionStatusResponse(
        id=transcription["id"],
        recording_id=transcription["recording_id"],
        status=transcription["status"],
        language=transcription["language"],
        model=transcription["model"],
        word_count=transcription.get("word_count"),
        confidence_score=transcription.get("confidence_score"),
        processing_duration_seconds=transcription.get("processing_duration_seconds"),
        error_message=transcription.get("error_message"),
        created_at=datetime.fromisoformat(transcription["created_at"]),
        completed_at=(
            datetime.fromisoformat(transcription["processing_completed_at"])
            if transcription.get("processing_completed_at") else None
        )
    )


@router.get("/{recording_id}/transcription/text")
async def get_transcription_text(recording_id: str):
    """
    Get the full transcription text.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    return {
        "transcription_id": transcription["id"],
        "recording_id": recording_id,
        "language": transcription["language"],
        "text": transcription.get("full_text", ""),
        "word_count": transcription.get("word_count", 0)
    }


@router.get("/{recording_id}/transcription/vtt")
async def get_transcription_vtt(recording_id: str):
    """
    Download transcription as WebVTT subtitle file.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    # Generate VTT content
    vtt_content = "WEBVTT\n\n"
    text = transcription.get("full_text", "")

    if text:
        sentences = text.replace(".", ".\n").split("\n")
        time_offset = 0
        for sentence in sentences:
            sentence = sentence.strip()
            if sentence:
                start = format_vtt_time(time_offset)
                time_offset += 3000
                end = format_vtt_time(time_offset)
                vtt_content += f"{start} --> {end}\n{sentence}\n\n"

    return PlainTextResponse(
        content=vtt_content,
        media_type="text/vtt",
        headers={"Content-Disposition": f"attachment; filename=transcript_{recording_id}.vtt"}
    )


@router.get("/{recording_id}/transcription/srt")
async def get_transcription_srt(recording_id: str):
    """
    Download transcription as SRT subtitle file.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    # Generate SRT content
    srt_content = ""
    text = transcription.get("full_text", "")

    if text:
        sentences = text.replace(".", ".\n").split("\n")
        time_offset = 0
        index = 1
        for sentence in sentences:
            sentence = sentence.strip()
            if sentence:
                start = format_srt_time(time_offset)
                time_offset += 3000
                end = format_srt_time(time_offset)
                srt_content += f"{index}\n{start} --> {end}\n{sentence}\n\n"
                index += 1

    return PlainTextResponse(
        content=srt_content,
        media_type="text/plain",
        headers={"Content-Disposition": f"attachment; filename=transcript_{recording_id}.srt"}
    )
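Worth flagging: the cue timing in both subtitle endpoints is synthetic. Sentences are split on periods and every cue gets a flat 3-second window, since no word-level timestamps are stored yet. A worked example of the VTT branch (illustrative, standalone):

# Worked example of the naive cue generation above -- illustrative only.
from recording.helpers import format_vtt_time

text = "Welcome everyone. Let us begin."
vtt = "WEBVTT\n\n"
offset = 0
for sentence in text.replace(".", ".\n").split("\n"):
    sentence = sentence.strip()
    if sentence:
        start = format_vtt_time(offset)
        offset += 3000
        vtt += f"{start} --> {format_vtt_time(offset)}\n{sentence}\n\n"
print(vtt)
# WEBVTT
#
# 00:00:00.000 --> 00:00:03.000
# Welcome everyone.
#
# 00:00:03.000 --> 00:00:06.000
# Let us begin.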
@@ -1,22 +1,4 @@
-"""
-BreakPilot Recording API — Barrel Re-export.
-
-Manages Jibri meeting recordings and their metadata.
-
-Split into:
-- recording_models.py: Pydantic models & config
-- recording_helpers.py: In-memory storage & utilities
-- recording_routes.py: Core recording CRUD routes
-- recording_transcription.py: Transcription routes
-- recording_minutes.py: Meeting minutes routes
-"""
-
-from fastapi import APIRouter
-
-from recording_routes import router as _routes_router
-from recording_transcription import router as _transcription_router
-from recording_minutes import router as _minutes_router
-
-router = APIRouter(prefix="/api/recordings", tags=["Recordings"])
-router.include_router(_routes_router)
-router.include_router(_transcription_router)
-router.include_router(_minutes_router)
+# Backward-compat shim -- module moved to recording/api.py
+import importlib as _importlib
+import sys as _sys
+_sys.modules[__name__] = _importlib.import_module("recording.api")
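The shim pattern used here and in the five hunks below re-points the old top-level module name at its new package home via sys.modules, so stale imports keep resolving. A minimal check (illustrative):

# Illustrative: the sys.modules alias makes old and new import paths identical.
import recording_api   # executes the 4-line shim
import recording.api

assert recording_api is recording.api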
@@ -1,57 +1,4 @@
# Backward-compat shim -- module moved to recording/helpers.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("recording.helpers")
@@ -1,187 +1,4 @@
# Backward-compat shim -- module moved to recording/minutes.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("recording.minutes")
@@ -1,98 +1,4 @@
# Backward-compat shim -- module moved to recording/models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("recording.models")
@@ -1,307 +1,4 @@
|
|||||||
"""
|
# Backward-compat shim -- module moved to recording/routes.py
|
||||||
Recording API - Core Recording Routes.
|
import importlib as _importlib
|
||||||
|
import sys as _sys
|
||||||
Webhook, CRUD, health, audit, and download endpoints.
|
_sys.modules[__name__] = _importlib.import_module("recording.routes")
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, HTTPException, Query, Request
|
|
||||||
from fastapi.responses import JSONResponse
|
|
||||||
|
|
||||||
from recording_models import (
|
|
||||||
JibriWebhookPayload,
|
|
||||||
RecordingResponse,
|
|
||||||
RecordingListResponse,
|
|
||||||
MINIO_ENDPOINT,
|
|
||||||
MINIO_BUCKET,
|
|
||||||
DEFAULT_RETENTION_DAYS,
|
|
||||||
)
|
|
||||||
from recording_helpers import (
|
|
||||||
_recordings_store,
|
|
||||||
_transcriptions_store,
|
|
||||||
_audit_log,
|
|
||||||
log_audit,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter(tags=["Recordings"])
|
|
||||||
|
|
||||||
|
|
||||||
# ==========================================
|
|
||||||
# WEBHOOK ENDPOINT (Jibri)
|
|
||||||
# ==========================================
|
|
||||||
|
|
||||||
@router.post("/webhook")
|
|
||||||
async def jibri_webhook(payload: JibriWebhookPayload, request: Request):
|
|
||||||
"""
|
|
||||||
Webhook endpoint called by Jibri finalize.sh after upload.
|
|
||||||
|
|
||||||
This creates a new recording entry and optionally triggers transcription.
|
|
||||||
"""
|
|
||||||
if payload.event != "recording_completed":
|
|
||||||
return JSONResponse(
|
|
||||||
status_code=400,
|
|
||||||
content={"error": f"Unknown event type: {payload.event}"}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Extract meeting_id from recording_name (format: meetingId_timestamp)
|
|
||||||
parts = payload.recording_name.split("_")
|
|
||||||
meeting_id = parts[0] if parts else payload.recording_name
|
|
||||||
|
|
||||||
# Create recording entry
|
|
||||||
recording_id = str(uuid.uuid4())
|
|
||||||
recorded_at = datetime.utcnow()
|
|
||||||
|
|
||||||
recording = {
|
|
||||||
"id": recording_id,
|
|
||||||
"meeting_id": meeting_id,
|
|
||||||
"jibri_session_id": payload.recording_name,
|
|
||||||
"title": f"Recording {meeting_id}",
|
|
||||||
"storage_path": payload.storage_path,
|
|
||||||
"audio_path": payload.audio_path,
|
|
||||||
"file_size_bytes": payload.file_size_bytes,
|
|
||||||
"duration_seconds": None, # Will be updated after analysis
|
|
||||||
"participant_count": 0,
|
|
||||||
"status": "uploaded",
|
|
||||||
"recorded_at": recorded_at.isoformat(),
|
|
||||||
"retention_days": DEFAULT_RETENTION_DAYS,
|
|
||||||
"created_at": datetime.utcnow().isoformat(),
|
|
||||||
"updated_at": datetime.utcnow().isoformat()
|
|
||||||
}
|
|
||||||
|
|
||||||
_recordings_store[recording_id] = recording
|
|
||||||
|
|
||||||
# Log the creation
|
|
||||||
log_audit(
|
|
||||||
action="created",
|
|
||||||
recording_id=recording_id,
|
|
||||||
metadata={
|
|
||||||
"source": "jibri_webhook",
|
|
||||||
"storage_path": payload.storage_path,
|
|
||||||
"file_size_bytes": payload.file_size_bytes
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"success": True,
|
|
||||||
"recording_id": recording_id,
|
|
||||||
"meeting_id": meeting_id,
|
|
||||||
"status": "uploaded",
|
|
||||||
"message": "Recording registered successfully"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# ==========================================
|
|
||||||
# HEALTH & AUDIT ENDPOINTS (must be before parameterized routes)
|
|
||||||
# ==========================================
|
|
||||||
|
|
||||||
@router.get("/health")
|
|
||||||
async def recordings_health():
|
|
||||||
"""Health check for recording service."""
|
|
||||||
return {
|
|
||||||
"status": "healthy",
|
|
||||||
"recordings_count": len(_recordings_store),
|
|
||||||
"transcriptions_count": len(_transcriptions_store),
|
|
||||||
"minio_endpoint": MINIO_ENDPOINT,
|
|
||||||
"bucket": MINIO_BUCKET
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/audit/log")
|
|
||||||
async def get_audit_log(
|
|
||||||
recording_id: Optional[str] = Query(None),
|
|
||||||
action: Optional[str] = Query(None),
|
|
||||||
limit: int = Query(100, ge=1, le=1000)
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Get audit log entries (DSGVO compliance).
|
|
||||||
|
|
||||||
Admin-only endpoint for reviewing recording access history.
|
|
||||||
"""
|
|
||||||
logs = _audit_log.copy()
|
|
||||||
|
|
||||||
if recording_id:
|
|
||||||
logs = [l for l in logs if l.get("recording_id") == recording_id]
|
|
||||||
if action:
|
|
||||||
logs = [l for l in logs if l.get("action") == action]
|
|
||||||
|
|
||||||
# Sort by created_at descending
|
|
||||||
logs.sort(key=lambda x: x["created_at"], reverse=True)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"entries": logs[:limit],
|
|
||||||
"total": len(logs)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# ==========================================
|
|
||||||
# RECORDING MANAGEMENT ENDPOINTS
|
|
||||||
# ==========================================
|
|
||||||
|
|
||||||
@router.get("/", response_model=RecordingListResponse)
|
|
||||||
async def list_recordings(
|
|
||||||
status: Optional[str] = Query(None, description="Filter by status"),
|
|
||||||
meeting_id: Optional[str] = Query(None, description="Filter by meeting ID"),
|
|
||||||
page: int = Query(1, ge=1, description="Page number"),
|
|
||||||
page_size: int = Query(20, ge=1, le=100, description="Items per page")
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
List all recordings with optional filtering.
|
|
||||||
|
|
||||||
Supports pagination and filtering by status or meeting ID.
|
|
||||||
"""
|
|
||||||
# Filter recordings
|
|
||||||
recordings = list(_recordings_store.values())
|
|
||||||
|
|
||||||
if status:
|
|
||||||
recordings = [r for r in recordings if r["status"] == status]
|
|
||||||
if meeting_id:
|
|
||||||
recordings = [r for r in recordings if r["meeting_id"] == meeting_id]
|
|
||||||
|
|
||||||
# Sort by recorded_at descending
|
|
||||||
recordings.sort(key=lambda x: x["recorded_at"], reverse=True)
|
|
||||||
|
|
||||||
# Paginate
|
|
||||||
total = len(recordings)
|
|
||||||
start = (page - 1) * page_size
|
|
||||||
end = start + page_size
|
|
||||||
page_recordings = recordings[start:end]
|
|
||||||
|
|
||||||
# Convert to response format
|
|
||||||
result = []
|
|
||||||
for rec in page_recordings:
|
|
||||||
recorded_at = datetime.fromisoformat(rec["recorded_at"])
|
|
||||||
retention_expires = recorded_at + timedelta(days=rec["retention_days"])
|
|
||||||
|
|
||||||
# Check for transcription
|
|
||||||
trans = next(
|
|
||||||
(t for t in _transcriptions_store.values() if t["recording_id"] == rec["id"]),
|
|
||||||
None
|
|
||||||
)
|
|
||||||
|
|
||||||
result.append(RecordingResponse(
|
|
||||||
id=rec["id"],
|
|
||||||
meeting_id=rec["meeting_id"],
|
|
||||||
title=rec.get("title"),
|
|
||||||
storage_path=rec["storage_path"],
|
|
||||||
audio_path=rec.get("audio_path"),
|
|
||||||
file_size_bytes=rec.get("file_size_bytes"),
|
|
||||||
duration_seconds=rec.get("duration_seconds"),
|
|
||||||
participant_count=rec.get("participant_count", 0),
|
|
||||||
status=rec["status"],
|
            recorded_at=recorded_at,
            retention_days=rec["retention_days"],
            retention_expires_at=retention_expires,
            transcription_status=trans["status"] if trans else None,
            transcription_id=trans["id"] if trans else None
        ))

    return RecordingListResponse(
        recordings=result,
        total=total,
        page=page,
        page_size=page_size
    )


@router.get("/{recording_id}", response_model=RecordingResponse)
async def get_recording(recording_id: str):
    """
    Get details for a specific recording.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    # Log view action
    log_audit(action="viewed", recording_id=recording_id)

    recorded_at = datetime.fromisoformat(recording["recorded_at"])
    retention_expires = recorded_at + timedelta(days=recording["retention_days"])

    # Check for transcription
    trans = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )

    return RecordingResponse(
        id=recording["id"],
        meeting_id=recording["meeting_id"],
        title=recording.get("title"),
        storage_path=recording["storage_path"],
        audio_path=recording.get("audio_path"),
        file_size_bytes=recording.get("file_size_bytes"),
        duration_seconds=recording.get("duration_seconds"),
        participant_count=recording.get("participant_count", 0),
        status=recording["status"],
        recorded_at=recorded_at,
        retention_days=recording["retention_days"],
        retention_expires_at=retention_expires,
        transcription_status=trans["status"] if trans else None,
        transcription_id=trans["id"] if trans else None
    )


@router.delete("/{recording_id}")
async def delete_recording(
    recording_id: str,
    reason: str = Query(..., description="Reason for deletion (DSGVO audit)")
):
    """
    Soft-delete a recording (DSGVO compliance).

    The recording is marked as deleted but retained for audit purposes.
    Actual file deletion happens after the audit retention period.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    # Soft delete
    recording["status"] = "deleted"
    recording["deleted_at"] = datetime.utcnow().isoformat()
    recording["updated_at"] = datetime.utcnow().isoformat()

    # Log deletion with reason
    log_audit(
        action="deleted",
        recording_id=recording_id,
        metadata={"reason": reason}
    )

    return {
        "success": True,
        "recording_id": recording_id,
        "status": "deleted",
        "message": "Recording marked for deletion"
    }


@router.get("/{recording_id}/download")
async def download_recording(recording_id: str):
    """
    Download the recording file.

    In production, this would generate a presigned URL to MinIO.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    if recording["status"] == "deleted":
        raise HTTPException(status_code=410, detail="Recording has been deleted")

    # Log download action
    log_audit(action="downloaded", recording_id=recording_id)

    # In production, generate presigned URL to MinIO
    # For now, return info about where the file is
    return {
        "recording_id": recording_id,
        "storage_path": recording["storage_path"],
        "file_size_bytes": recording.get("file_size_bytes"),
        "message": "In production, this would redirect to a presigned MinIO URL"
    }
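
The download handler above only returns the storage path. A minimal sketch of the presigned-URL variant its docstring describes, using the `minio` client package; the endpoint, credentials, and bucket layout here are illustrative assumptions, not part of this commit:

from datetime import timedelta
from minio import Minio  # assumed dependency; not introduced by this commit

_minio = Minio("minio:9000", access_key="...", secret_key="...", secure=False)

def presigned_recording_url(storage_path: str) -> str:
    # Assumes storage_path has the form "<bucket>/<object-key>".
    bucket, _, key = storage_path.partition("/")
    # Presigned GET URL, valid for one hour.
    return _minio.presigned_get_object(bucket, key, expires=timedelta(hours=1))
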
@@ -1,250 +1,4 @@
# Backward-compat shim -- module moved to recording/transcription.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("recording.transcription")
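
The four lines above are the entire new file body; the rest of this hunk is the removed module, which now lives at recording/transcription.py. The same shim pattern repeats for every file in this restructure: registering the relocated module under the old name in sys.modules keeps stale imports working. A rough sketch of the effect, assuming the old module was importable as recording_transcription (the original filename is not visible in this excerpt):

# Hypothetical module names -- for illustration only.
import recording_transcription      # executes the four-line shim
import recording.transcription

# The shim replaced its own sys.modules entry, so both names resolve
# to one module object and share the router, stores, and all state.
assert recording_transcription is recording.transcription
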
"""
Recording API - Transcription Routes.

Start transcription, get status, download VTT/SRT subtitle files.
"""

import uuid
from datetime import datetime
from typing import Optional

from fastapi import APIRouter, HTTPException
from fastapi.responses import PlainTextResponse

from recording_models import (
    TranscriptionRequest,
    TranscriptionStatusResponse,
)
from recording_helpers import (
    _recordings_store,
    _transcriptions_store,
    log_audit,
    format_vtt_time,
    format_srt_time,
)

router = APIRouter(tags=["Recordings"])


# ==========================================
# TRANSCRIPTION ENDPOINTS
# ==========================================

@router.post("/{recording_id}/transcribe", response_model=TranscriptionStatusResponse)
async def start_transcription(recording_id: str, request: TranscriptionRequest):
    """
    Start transcription for a recording.

    Queues the recording for processing by the transcription worker.
    """
    recording = _recordings_store.get(recording_id)
    if not recording:
        raise HTTPException(status_code=404, detail="Recording not found")

    if recording["status"] == "deleted":
        raise HTTPException(status_code=400, detail="Cannot transcribe deleted recording")

    # Check if transcription already exists
    existing = next(
        (t for t in _transcriptions_store.values()
         if t["recording_id"] == recording_id and t["status"] != "failed"),
        None
    )
    if existing:
        raise HTTPException(
            status_code=409,
            detail=f"Transcription already exists with status: {existing['status']}"
        )

    # Create transcription entry
    transcription_id = str(uuid.uuid4())
    now = datetime.utcnow()

    transcription = {
        "id": transcription_id,
        "recording_id": recording_id,
        "language": request.language,
        "model": request.model,
        "status": "pending",
        "full_text": None,
        "word_count": None,
        "confidence_score": None,
        "vtt_path": None,
        "srt_path": None,
        "json_path": None,
        "error_message": None,
        "processing_started_at": None,
        "processing_completed_at": None,
        "processing_duration_seconds": None,
        "created_at": now.isoformat(),
        "updated_at": now.isoformat()
    }

    _transcriptions_store[transcription_id] = transcription

    # Update recording status
    recording["status"] = "processing"
    recording["updated_at"] = now.isoformat()

    # Log transcription start
    log_audit(
        action="transcription_started",
        recording_id=recording_id,
        transcription_id=transcription_id,
        metadata={"language": request.language, "model": request.model}
    )

    # TODO: Queue job to Redis/Valkey for transcription worker

    return TranscriptionStatusResponse(
        id=transcription_id,
        recording_id=recording_id,
        status="pending",
        language=request.language,
        model=request.model,
        word_count=None,
        confidence_score=None,
        processing_duration_seconds=None,
        error_message=None,
        created_at=now,
        completed_at=None
    )


@router.get("/{recording_id}/transcription", response_model=TranscriptionStatusResponse)
async def get_transcription_status(recording_id: str):
    """
    Get transcription status for a recording.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    return TranscriptionStatusResponse(
        id=transcription["id"],
        recording_id=transcription["recording_id"],
        status=transcription["status"],
        language=transcription["language"],
        model=transcription["model"],
        word_count=transcription.get("word_count"),
        confidence_score=transcription.get("confidence_score"),
        processing_duration_seconds=transcription.get("processing_duration_seconds"),
        error_message=transcription.get("error_message"),
        created_at=datetime.fromisoformat(transcription["created_at"]),
        completed_at=(
            datetime.fromisoformat(transcription["processing_completed_at"])
            if transcription.get("processing_completed_at") else None
        )
    )


@router.get("/{recording_id}/transcription/text")
async def get_transcription_text(recording_id: str):
    """
    Get the full transcription text.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    return {
        "transcription_id": transcription["id"],
        "recording_id": recording_id,
        "language": transcription["language"],
        "text": transcription.get("full_text", ""),
        "word_count": transcription.get("word_count", 0)
    }


@router.get("/{recording_id}/transcription/vtt")
async def get_transcription_vtt(recording_id: str):
    """
    Download transcription as WebVTT subtitle file.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    # Generate VTT content
    vtt_content = "WEBVTT\n\n"
    text = transcription.get("full_text", "")

    if text:
        sentences = text.replace(".", ".\n").split("\n")
        time_offset = 0
        for sentence in sentences:
            sentence = sentence.strip()
            if sentence:
                start = format_vtt_time(time_offset)
                time_offset += 3000
                end = format_vtt_time(time_offset)
                vtt_content += f"{start} --> {end}\n{sentence}\n\n"

    return PlainTextResponse(
        content=vtt_content,
        media_type="text/vtt",
        headers={"Content-Disposition": f"attachment; filename=transcript_{recording_id}.vtt"}
    )
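
Note that the cue timing above is synthetic: the text is split naively on ".", and every sentence gets a fixed 3000 ms window. Assuming format_vtt_time takes milliseconds and renders HH:MM:SS.mmm (its body is not shown in this excerpt), a full_text of "Hello class. Today optics." would come out roughly as:

WEBVTT

00:00:00.000 --> 00:00:03.000
Hello class.

00:00:03.000 --> 00:00:06.000
Today optics.
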

@router.get("/{recording_id}/transcription/srt")
async def get_transcription_srt(recording_id: str):
    """
    Download transcription as SRT subtitle file.
    """
    transcription = next(
        (t for t in _transcriptions_store.values() if t["recording_id"] == recording_id),
        None
    )
    if not transcription:
        raise HTTPException(status_code=404, detail="No transcription found for this recording")

    if transcription["status"] != "completed":
        raise HTTPException(
            status_code=400,
            detail=f"Transcription not ready. Status: {transcription['status']}"
        )

    # Generate SRT content
    srt_content = ""
    text = transcription.get("full_text", "")

    if text:
        sentences = text.replace(".", ".\n").split("\n")
        time_offset = 0
        index = 1
        for sentence in sentences:
            sentence = sentence.strip()
            if sentence:
                start = format_srt_time(time_offset)
                time_offset += 3000
                end = format_srt_time(time_offset)
                srt_content += f"{index}\n{start} --> {end}\n{sentence}\n\n"
                index += 1

    return PlainTextResponse(
        content=srt_content,
        media_type="text/plain",
        headers={"Content-Disposition": f"attachment; filename=transcript_{recording_id}.srt"}
    )
@@ -1,267 +1,4 @@
# Backward-compat shim -- module moved to dashboard/analytics.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("dashboard.analytics")

# ==============================================
# Teacher Dashboard - Analytics & Progress Routes
# ==============================================

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import logging

from teacher_dashboard_models import (
    UnitAssignmentStatus, TeacherControlSettings,
    UnitAssignment, StudentUnitProgress, ClassUnitProgress,
    MisconceptionReport, ClassAnalyticsSummary, ContentResource,
    get_current_teacher, get_teacher_database,
    get_classes_for_teacher, get_students_in_class,
    REQUIRE_AUTH,
)

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Teacher Dashboard"])

# Shared in-memory store reference (set from teacher_dashboard_api)
_assignments_store: Dict[str, Dict[str, Any]] = {}


def set_assignments_store(store: Dict[str, Dict[str, Any]]):
    """Share the in-memory assignments store from the main module."""
    global _assignments_store
    _assignments_store = store


# ==============================================
# API Endpoints - Progress & Analytics
# ==============================================

@router.get("/assignments/{assignment_id}/progress", response_model=ClassUnitProgress)
async def get_assignment_progress(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> ClassUnitProgress:
    """Get detailed progress for an assignment."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    students = await get_students_in_class(assignment["class_id"])
    student_progress = []
    total_completion = 0.0
    total_precheck = 0.0
    total_postcheck = 0.0
    total_time = 0
    precheck_count = 0
    postcheck_count = 0
    started = 0
    completed = 0

    for student in students:
        student_id = student.get("id", student.get("student_id"))
        progress = StudentUnitProgress(
            student_id=student_id,
            student_name=student.get("name", f"Student {student_id[:8]}"),
            status="not_started", completion_rate=0.0, stops_completed=0, total_stops=0,
        )
        if db:
            try:
                session_data = await db.get_student_unit_session(
                    student_id=student_id, unit_id=assignment["unit_id"]
                )
                if session_data:
                    progress.session_id = session_data.get("session_id")
                    progress.status = "completed" if session_data.get("completed_at") else "in_progress"
                    progress.completion_rate = session_data.get("completion_rate", 0.0)
                    progress.precheck_score = session_data.get("precheck_score")
                    progress.postcheck_score = session_data.get("postcheck_score")
                    progress.time_spent_minutes = session_data.get("duration_seconds", 0) // 60
                    progress.last_activity = session_data.get("updated_at")
                    progress.stops_completed = session_data.get("stops_completed", 0)
                    progress.total_stops = session_data.get("total_stops", 0)
                    if progress.precheck_score is not None and progress.postcheck_score is not None:
                        progress.learning_gain = progress.postcheck_score - progress.precheck_score
                    total_completion += progress.completion_rate
                    total_time += progress.time_spent_minutes
                    if progress.precheck_score is not None:
                        total_precheck += progress.precheck_score
                        precheck_count += 1
                    if progress.postcheck_score is not None:
                        total_postcheck += progress.postcheck_score
                        postcheck_count += 1
                    if progress.status != "not_started":
                        started += 1
                    if progress.status == "completed":
                        completed += 1
            except Exception as e:
                logger.error(f"Failed to get student progress: {e}")
        student_progress.append(progress)

    total_students = len(students) or 1
    return ClassUnitProgress(
        assignment_id=assignment_id, unit_id=assignment["unit_id"],
        unit_title=f"Unit {assignment['unit_id']}", class_id=assignment["class_id"],
        class_name=f"Class {assignment['class_id'][:8]}", total_students=len(students),
        started_count=started, completed_count=completed,
        avg_completion_rate=total_completion / total_students,
        avg_precheck_score=total_precheck / precheck_count if precheck_count > 0 else None,
        avg_postcheck_score=total_postcheck / postcheck_count if postcheck_count > 0 else None,
        avg_learning_gain=(total_postcheck / postcheck_count - total_precheck / precheck_count)
        if precheck_count > 0 and postcheck_count > 0 else None,
        avg_time_minutes=total_time / started if started > 0 else 0,
        students=student_progress,
    )
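
Note that avg_learning_gain above is the difference of two group means, not the mean of per-student gains; the two disagree whenever the set of students with a precheck differs from the set with a postcheck. A small worked illustration:

# Two students took the precheck, only one finished the postcheck.
prechecks = {"anna": 0.4, "ben": 0.8}
postchecks = {"anna": 0.7}

group_mean_gain = (
    sum(postchecks.values()) / len(postchecks)   # 0.70
    - sum(prechecks.values()) / len(prechecks)   # 0.60
)                                                # -> 0.10

per_student_gain = postchecks["anna"] - prechecks["anna"]  # -> 0.30
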

@router.get("/classes/{class_id}/analytics", response_model=ClassAnalyticsSummary)
async def get_class_analytics(
    class_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> ClassAnalyticsSummary:
    """Get summary analytics for a class."""
    db = await get_teacher_database()
    assignments = []
    if db:
        try:
            assignments = await db.list_assignments(teacher_id=teacher["user_id"], class_id=class_id)
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")
    if not assignments:
        assignments = [
            a for a in _assignments_store.values()
            if a["class_id"] == class_id and a["teacher_id"] == teacher["user_id"]
        ]

    total_units = len(assignments)
    completed_units = sum(1 for a in assignments if a.get("status") == "completed")
    active_units = sum(1 for a in assignments if a.get("status") == "active")

    students = await get_students_in_class(class_id)
    student_scores = {}
    misconceptions = []
    if db:
        try:
            for student in students:
                student_id = student.get("id", student.get("student_id"))
                analytics = await db.get_student_analytics(student_id)
                if analytics:
                    student_scores[student_id] = {
                        "name": student.get("name", student_id[:8]),
                        "avg_score": analytics.get("avg_postcheck_score", 0),
                        "total_time": analytics.get("total_time_minutes", 0),
                    }
            misconceptions_data = await db.get_class_misconceptions(class_id)
            for m in misconceptions_data:
                misconceptions.append(MisconceptionReport(
                    concept_id=m["concept_id"], concept_label=m["concept_label"],
                    misconception=m["misconception"], affected_students=m["affected_students"],
                    frequency=m["frequency"], unit_id=m["unit_id"], stop_id=m["stop_id"],
                ))
        except Exception as e:
            logger.error(f"Failed to aggregate analytics: {e}")

    sorted_students = sorted(student_scores.items(), key=lambda x: x[1]["avg_score"], reverse=True)
    top_performers = [s[1]["name"] for s in sorted_students[:3]]
    struggling_students = [s[1]["name"] for s in sorted_students[-3:] if s[1]["avg_score"] < 0.6]
    total_time = sum(s["total_time"] for s in student_scores.values())
    avg_scores = [s["avg_score"] for s in student_scores.values() if s["avg_score"] > 0]
    avg_completion = sum(avg_scores) / len(avg_scores) if avg_scores else 0

    return ClassAnalyticsSummary(
        class_id=class_id, class_name=f"Klasse {class_id[:8]}",
        total_units_assigned=total_units, units_completed=completed_units,
        active_units=active_units, avg_completion_rate=avg_completion,
        avg_learning_gain=None, total_time_hours=total_time / 60,
        top_performers=top_performers, struggling_students=struggling_students,
        common_misconceptions=misconceptions[:5],
    )


@router.get("/students/{student_id}/progress")
async def get_student_progress(
    student_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Get detailed progress for a specific student."""
    db = await get_teacher_database()
    if db:
        try:
            progress = await db.get_student_full_progress(student_id)
            return progress
        except Exception as e:
            logger.error(f"Failed to get student progress: {e}")
    return {
        "student_id": student_id, "units_attempted": 0, "units_completed": 0,
        "avg_score": 0.0, "total_time_minutes": 0, "sessions": [],
    }


# ==============================================
# API Endpoints - Content Resources
# ==============================================

@router.get("/assignments/{assignment_id}/resources", response_model=List[ContentResource])
async def get_assignment_resources(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher),
    request: Request = None
) -> List[ContentResource]:
    """Get generated content resources for an assignment."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    unit_id = assignment["unit_id"]
    base_url = str(request.base_url).rstrip("/") if request else "http://localhost:8000"
    return [
        ContentResource(resource_type="h5p", title=f"{unit_id} - H5P Aktivitaeten",
                        url=f"{base_url}/api/units/content/{unit_id}/h5p",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
        ContentResource(resource_type="worksheet", title=f"{unit_id} - Arbeitsblatt (HTML)",
                        url=f"{base_url}/api/units/content/{unit_id}/worksheet",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
        ContentResource(resource_type="pdf", title=f"{unit_id} - Arbeitsblatt (PDF)",
                        url=f"{base_url}/api/units/content/{unit_id}/worksheet.pdf",
                        generated_at=datetime.utcnow(), unit_id=unit_id),
    ]


@router.post("/assignments/{assignment_id}/regenerate-content")
async def regenerate_content(
    assignment_id: str,
    resource_type: str = Query("all", description="h5p, pdf, or all"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Trigger regeneration of content resources."""
    db = await get_teacher_database()
    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")
    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]
    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    logger.info(f"Content regeneration triggered for {assignment['unit_id']}: {resource_type}")
    return {
        "status": "queued", "assignment_id": assignment_id,
        "unit_id": assignment["unit_id"], "resource_type": resource_type,
        "message": "Content regeneration has been queued",
    }
@@ -1,329 +1,4 @@
# Backward-compat shim -- module moved to dashboard/api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("dashboard.api")

# ==============================================
# Breakpilot Drive - Teacher Dashboard API
# ==============================================
# Teacher dashboard for unit assignment and analytics.
#
# Split structure:
# - teacher_dashboard_models.py: Models, Auth, DB/School helpers
# - teacher_dashboard_analytics.py: Progress, analytics, content routes
# - teacher_dashboard_api.py: Assignment CRUD, dashboard, units (this file)

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import uuid
import logging

from teacher_dashboard_models import (
    UnitAssignmentStatus, TeacherControlSettings, AssignUnitRequest,
    UnitAssignment,
    get_current_teacher, get_teacher_database,
    get_classes_for_teacher,
    REQUIRE_AUTH,
)
from teacher_dashboard_analytics import (
    router as analytics_router,
    set_assignments_store,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/teacher", tags=["Teacher Dashboard"])

# In-Memory Storage (Fallback)
_assignments_store: Dict[str, Dict[str, Any]] = {}

# Share the store with the analytics module and include its routes
set_assignments_store(_assignments_store)
router.include_router(analytics_router)


# ==============================================
# API Endpoints - Unit Assignment
# ==============================================

@router.post("/assignments", response_model=UnitAssignment)
async def assign_unit_to_class(
    request_data: AssignUnitRequest,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Assign a unit to a class."""
    assignment_id = str(uuid.uuid4())
    now = datetime.utcnow()
    settings = request_data.settings or TeacherControlSettings()

    assignment = {
        "assignment_id": assignment_id, "unit_id": request_data.unit_id,
        "class_id": request_data.class_id, "teacher_id": teacher["user_id"],
        "status": UnitAssignmentStatus.ACTIVE, "settings": settings.model_dump(),
        "due_date": request_data.due_date, "notes": request_data.notes,
        "created_at": now, "updated_at": now,
    }

    db = await get_teacher_database()
    if db:
        try:
            await db.create_assignment(assignment)
        except Exception as e:
            logger.error(f"Failed to store assignment: {e}")

    _assignments_store[assignment_id] = assignment
    logger.info(f"Unit {request_data.unit_id} assigned to class {request_data.class_id}")

    return UnitAssignment(
        assignment_id=assignment_id, unit_id=request_data.unit_id,
        class_id=request_data.class_id, teacher_id=teacher["user_id"],
        status=UnitAssignmentStatus.ACTIVE, settings=settings,
        due_date=request_data.due_date, notes=request_data.notes,
        created_at=now, updated_at=now,
    )
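
Note the dual write above: the route stores the assignment in the database and always mirrors it into the in-memory store, so the read paths can fall back cleanly when the database is down. A minimal client-side sketch of this write path (endpoint shape follows from the /api/teacher prefix and AssignUnitRequest; host, token, and IDs are placeholders):

import httpx

# Hypothetical values -- substitute a real teacher JWT and real IDs.
headers = {"Authorization": "Bearer <teacher-jwt>"}
payload = {"unit_id": "bio_eye_lightpath_v1", "class_id": "class-123"}

resp = httpx.post("http://localhost:8000/api/teacher/assignments",
                  json=payload, headers=headers)
resp.raise_for_status()
print(resp.json()["assignment_id"])  # server-generated UUID
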

@router.get("/assignments", response_model=List[UnitAssignment])
async def list_assignments(
    class_id: Optional[str] = Query(None, description="Filter by class"),
    status: Optional[UnitAssignmentStatus] = Query(None, description="Filter by status"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> List[UnitAssignment]:
    """List all unit assignments for the teacher."""
    db = await get_teacher_database()
    assignments = []

    if db:
        try:
            assignments = await db.list_assignments(
                teacher_id=teacher["user_id"],
                class_id=class_id,
                status=status.value if status else None
            )
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")

    if not assignments:
        for assignment in _assignments_store.values():
            if assignment["teacher_id"] != teacher["user_id"]:
                continue
            if class_id and assignment["class_id"] != class_id:
                continue
            if status and assignment["status"] != status.value:
                continue
            assignments.append(assignment)

    return [
        UnitAssignment(
            assignment_id=a["assignment_id"], unit_id=a["unit_id"],
            class_id=a["class_id"], teacher_id=a["teacher_id"],
            status=a["status"], settings=TeacherControlSettings(**a["settings"]),
            due_date=a.get("due_date"), notes=a.get("notes"),
            created_at=a["created_at"], updated_at=a["updated_at"],
        )
        for a in assignments
    ]


@router.get("/assignments/{assignment_id}", response_model=UnitAssignment)
async def get_assignment(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Get details of a specific assignment."""
    db = await get_teacher_database()
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
            if assignment and assignment["teacher_id"] == teacher["user_id"]:
                return UnitAssignment(
                    assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
                    class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
                    status=assignment["status"],
                    settings=TeacherControlSettings(**assignment["settings"]),
                    due_date=assignment.get("due_date"), notes=assignment.get("notes"),
                    created_at=assignment["created_at"], updated_at=assignment["updated_at"],
                )
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")

    if assignment_id in _assignments_store:
        a = _assignments_store[assignment_id]
        if a["teacher_id"] == teacher["user_id"]:
            return UnitAssignment(
                assignment_id=a["assignment_id"], unit_id=a["unit_id"],
                class_id=a["class_id"], teacher_id=a["teacher_id"],
                status=a["status"], settings=TeacherControlSettings(**a["settings"]),
                due_date=a.get("due_date"), notes=a.get("notes"),
                created_at=a["created_at"], updated_at=a["updated_at"],
            )

    raise HTTPException(status_code=404, detail="Assignment not found")


@router.put("/assignments/{assignment_id}")
async def update_assignment(
    assignment_id: str,
    settings: Optional[TeacherControlSettings] = None,
    status: Optional[UnitAssignmentStatus] = None,
    due_date: Optional[datetime] = None,
    notes: Optional[str] = None,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> UnitAssignment:
    """Update assignment settings or status."""
    db = await get_teacher_database()
    assignment = None

    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")

    if not assignment and assignment_id in _assignments_store:
        assignment = _assignments_store[assignment_id]

    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    if settings:
        assignment["settings"] = settings.model_dump()
    if status:
        assignment["status"] = status.value
    if due_date:
        assignment["due_date"] = due_date
    if notes is not None:
        assignment["notes"] = notes
    assignment["updated_at"] = datetime.utcnow()

    if db:
        try:
            await db.update_assignment(assignment)
        except Exception as e:
            logger.error(f"Failed to update assignment: {e}")

    _assignments_store[assignment_id] = assignment

    return UnitAssignment(
        assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
        class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
        status=assignment["status"], settings=TeacherControlSettings(**assignment["settings"]),
        due_date=assignment.get("due_date"), notes=assignment.get("notes"),
        created_at=assignment["created_at"], updated_at=assignment["updated_at"],
    )


@router.delete("/assignments/{assignment_id}")
async def delete_assignment(
    assignment_id: str,
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, str]:
    """Delete/archive an assignment."""
    db = await get_teacher_database()
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
            if assignment and assignment["teacher_id"] == teacher["user_id"]:
                await db.delete_assignment(assignment_id)
                if assignment_id in _assignments_store:
                    del _assignments_store[assignment_id]
                return {"status": "deleted", "assignment_id": assignment_id}
        except Exception as e:
            logger.error(f"Failed to delete assignment: {e}")

    if assignment_id in _assignments_store:
        a = _assignments_store[assignment_id]
        if a["teacher_id"] == teacher["user_id"]:
            del _assignments_store[assignment_id]
            return {"status": "deleted", "assignment_id": assignment_id}

    raise HTTPException(status_code=404, detail="Assignment not found")


# ==============================================
# API Endpoints - Available Units
# ==============================================

@router.get("/units/available")
async def list_available_units(
    grade: Optional[str] = Query(None, description="Filter by grade level"),
    template: Optional[str] = Query(None, description="Filter by template type"),
    locale: str = Query("de-DE", description="Locale"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> List[Dict[str, Any]]:
    """List all available units for assignment."""
    db = await get_teacher_database()
    if db:
        try:
            units = await db.list_available_units(grade=grade, template=template, locale=locale)
            return units
        except Exception as e:
            logger.error(f"Failed to list units: {e}")
    return [
        {
            "unit_id": "bio_eye_lightpath_v1", "title": "Auge - Lichtstrahl-Flug",
            "template": "flight_path", "grade_band": ["5", "6", "7"],
            "duration_minutes": 8, "difficulty": "base",
            "description": "Reise durch das Auge und folge dem Lichtstrahl",
            "learning_objectives": ["Verstehen des Lichtwegs durch das Auge",
                                    "Funktionen der Augenbestandteile benennen"],
        },
        {
            "unit_id": "math_pizza_equivalence_v1",
            "title": "Pizza-Boxenstopp - Brueche und Prozent",
            "template": "station_loop", "grade_band": ["5", "6"],
            "duration_minutes": 10, "difficulty": "base",
            "description": "Entdecke die Verbindung zwischen Bruechen, Dezimalzahlen und Prozent",
            "learning_objectives": ["Brueche in Prozent umrechnen", "Aequivalenzen erkennen"],
        },
    ]


# ==============================================
# API Endpoints - Dashboard Overview
# ==============================================

@router.get("/dashboard")
async def get_dashboard(
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Get teacher dashboard overview."""
    db = await get_teacher_database()
    classes = await get_classes_for_teacher(teacher["user_id"])

    active_assignments = []
    if db:
        try:
            active_assignments = await db.list_assignments(
                teacher_id=teacher["user_id"], status="active"
            )
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")
    if not active_assignments:
        active_assignments = [
            a for a in _assignments_store.values()
            if a["teacher_id"] == teacher["user_id"] and a.get("status") == "active"
        ]

    alerts = []
    for assignment in active_assignments:
        if assignment.get("due_date") and assignment["due_date"] < datetime.utcnow() + timedelta(days=2):
            alerts.append({
                "type": "due_soon", "assignment_id": assignment["assignment_id"],
                "message": "Zuweisung endet in weniger als 2 Tagen",
            })

    return {
        "teacher": {"id": teacher["user_id"], "name": teacher.get("name", "Lehrer"),
                    "email": teacher.get("email")},
        "classes": len(classes), "active_assignments": len(active_assignments),
        "total_students": sum(c.get("student_count", 0) for c in classes),
        "alerts": alerts, "recent_activity": [],
    }


@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for teacher dashboard API."""
    db = await get_teacher_database()
    db_status = "connected" if db else "in-memory"
    return {
        "status": "healthy", "service": "teacher-dashboard",
        "database": db_status, "auth_required": REQUIRE_AUTH,
    }
@@ -1,226 +1,4 @@
# Backward-compat shim -- module moved to dashboard/models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("dashboard.models")

"""
Teacher Dashboard - Pydantic Models, Auth Dependency, and Service Helpers.
"""

import os
import logging
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum

from fastapi import HTTPException, Request
from pydantic import BaseModel
import httpx

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
REQUIRE_AUTH = os.getenv("TEACHER_REQUIRE_AUTH", "true").lower() == "true"
SCHOOL_SERVICE_URL = os.getenv("SCHOOL_SERVICE_URL", "http://school-service:8084")


# ==============================================
# Pydantic Models
# ==============================================

class UnitAssignmentStatus(str, Enum):
    """Status of a unit assignment"""
    DRAFT = "draft"
    ACTIVE = "active"
    COMPLETED = "completed"
    ARCHIVED = "archived"


class TeacherControlSettings(BaseModel):
    """Unit settings that teachers can configure"""
    allow_skip: bool = True
    allow_replay: bool = True
    max_time_per_stop_sec: int = 90
    show_hints: bool = True
    require_precheck: bool = True
    require_postcheck: bool = True


class AssignUnitRequest(BaseModel):
    """Request to assign a unit to a class"""
    unit_id: str
    class_id: str
    due_date: Optional[datetime] = None
    settings: Optional[TeacherControlSettings] = None
    notes: Optional[str] = None


class UnitAssignment(BaseModel):
    """Unit assignment record"""
    assignment_id: str
    unit_id: str
    class_id: str
    teacher_id: str
    status: UnitAssignmentStatus
    settings: TeacherControlSettings
    due_date: Optional[datetime] = None
    notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime


class StudentUnitProgress(BaseModel):
    """Progress of a single student on a unit"""
    student_id: str
    student_name: str
    session_id: Optional[str] = None
    status: str  # "not_started", "in_progress", "completed"
    completion_rate: float = 0.0
    precheck_score: Optional[float] = None
    postcheck_score: Optional[float] = None
    learning_gain: Optional[float] = None
    time_spent_minutes: int = 0
    last_activity: Optional[datetime] = None
    current_stop: Optional[str] = None
    stops_completed: int = 0
    total_stops: int = 0


class ClassUnitProgress(BaseModel):
    """Overall progress of a class on a unit"""
    assignment_id: str
    unit_id: str
    unit_title: str
    class_id: str
    class_name: str
    total_students: int
    started_count: int
    completed_count: int
    avg_completion_rate: float
    avg_precheck_score: Optional[float] = None
    avg_postcheck_score: Optional[float] = None
    avg_learning_gain: Optional[float] = None
    avg_time_minutes: float
    students: List[StudentUnitProgress]


class MisconceptionReport(BaseModel):
    """Report of detected misconceptions"""
    concept_id: str
    concept_label: str
    misconception: str
    affected_students: List[str]
    frequency: int
    unit_id: str
    stop_id: str


class ClassAnalyticsSummary(BaseModel):
    """Summary analytics for a class"""
    class_id: str
    class_name: str
    total_units_assigned: int
    units_completed: int
    active_units: int
    avg_completion_rate: float
    avg_learning_gain: Optional[float]
    total_time_hours: float
    top_performers: List[str]
    struggling_students: List[str]
    common_misconceptions: List[MisconceptionReport]


class ContentResource(BaseModel):
    """Generated content resource"""
    resource_type: str  # "h5p", "pdf", "worksheet"
    title: str
    url: str
    generated_at: datetime
    unit_id: str


# ==============================================
# Auth Dependency
# ==============================================

async def get_current_teacher(request: Request) -> Dict[str, Any]:
    """Get current teacher from JWT token."""
    if not REQUIRE_AUTH:
        return {
            "user_id": "e9484ad9-32ee-4f2b-a4e1-d182e02ccf20",
            "email": "demo@breakpilot.app",
            "role": "teacher",
            "name": "Demo Lehrer"
        }

    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing authorization token")

    try:
        import jwt
        token = auth_header[7:]
        secret = os.getenv("JWT_SECRET", "dev-secret-key")
        payload = jwt.decode(token, secret, algorithms=["HS256"])

        if payload.get("role") not in ["teacher", "admin"]:
            raise HTTPException(status_code=403, detail="Teacher or admin role required")

        return payload
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")
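
For local testing against this dependency, a token only needs the HS256 secret, a teacher or admin role claim, and the fields the routes read (user_id, name, email). A hedged sketch with PyJWT; the claim values are placeholders, and omitting "exp" means the token never expires, so this is strictly a dev convenience:

import jwt  # PyJWT

token = jwt.encode(
    {
        "user_id": "t-0001",        # read by the assignment routes
        "role": "teacher",          # must be "teacher" or "admin"
        "name": "Test Teacher",
        "email": "test@example.org",
    },
    "dev-secret-key",               # must match JWT_SECRET
    algorithm="HS256",
)
print(f"Authorization: Bearer {token}")
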


# ==============================================
# Database Integration
# ==============================================

_teacher_db = None


async def get_teacher_database():
    """Get teacher database instance with lazy initialization."""
    global _teacher_db
    if not USE_DATABASE:
        return None
    if _teacher_db is None:
        try:
            from unit.database import get_teacher_db
            _teacher_db = await get_teacher_db()
            logger.info("Teacher database initialized")
        except ImportError:
            logger.warning("Teacher database module not available")
        except Exception as e:
            logger.warning(f"Teacher database not available: {e}")
    return _teacher_db


# ==============================================
# School Service Integration
# ==============================================

async def get_classes_for_teacher(teacher_id: str) -> List[Dict[str, Any]]:
    """Get classes assigned to a teacher from school service."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(
                f"{SCHOOL_SERVICE_URL}/api/v1/school/classes",
                headers={"X-Teacher-ID": teacher_id}
            )
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get classes from school service: {e}")
    return []


async def get_students_in_class(class_id: str) -> List[Dict[str, Any]]:
    """Get students in a class from school service."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(
                f"{SCHOOL_SERVICE_URL}/api/v1/school/classes/{class_id}/students"
            )
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get students from school service: {e}")
    return []
@@ -1,25 +1,4 @@
# Backward-compat shim -- module moved to units/analytics_api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.analytics_api")

"""
Breakpilot Drive - Unit Analytics API - Barrel Re-export.

Extended analytics for learning progress:
- Pre/post gain visualization
- Misconception tracking
- Stop-level analytics
- Aggregated class statistics
- Export functions

Split into:
- unit_analytics_models.py: Pydantic models & enums
- unit_analytics_helpers.py: Database access & computation helpers
- unit_analytics_routes.py: Core analytics endpoint handlers
- unit_analytics_export.py: Export & dashboard endpoints
"""

from fastapi import APIRouter

from unit_analytics_routes import router as _routes_router
from unit_analytics_export import router as _export_router

router = APIRouter(prefix="/api/analytics", tags=["Unit Analytics"])
router.include_router(_routes_router)
router.include_router(_export_router)
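
The barrel pattern above gives consumers a single import point for both sub-routers. A minimal sketch of mounting it, using the new module path named in the shim; the app wiring itself is illustrative, not part of this commit:

from fastapi import FastAPI
from units.analytics_api import router as analytics_router  # new location per the shim

app = FastAPI()
# One include wires up both the core routes and the export/dashboard
# routes under the shared /api/analytics prefix.
app.include_router(analytics_router)
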
@@ -1,145 +1,4 @@
# Backward-compat shim -- module moved to units/analytics_export.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.analytics_export")

"""
Unit Analytics API - Export & Dashboard Routes.

Export endpoints for learning gains and misconceptions, plus dashboard overview.
"""

import logging
from datetime import datetime
from typing import Optional, Dict, Any

from fastapi import APIRouter, Query
from fastapi.responses import Response

from unit_analytics_models import TimeRange, ExportFormat
from unit_analytics_helpers import get_analytics_database

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Unit Analytics"])


# ==============================================
# API Endpoints - Export
# ==============================================

@router.get("/export/learning-gains")
async def export_learning_gains(
    unit_id: Optional[str] = Query(None),
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.ALL),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export learning gain data.
    """
    db = await get_analytics_database()
    data = []

    if db:
        try:
            data = await db.export_learning_gains(
                unit_id=unit_id, class_id=class_id, time_range=time_range.value
            )
        except Exception as e:
            logger.error(f"Failed to export data: {e}")

    if format == ExportFormat.CSV:
        # The header row is emitted even when there is no data.
        csv_content = "student_id,unit_id,precheck,postcheck,gain\n"
        for row in data:
            csv_content += f"{row['student_id']},{row['unit_id']},{row.get('precheck', '')},{row.get('postcheck', '')},{row.get('gain', '')}\n"

        return Response(
            content=csv_content,
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=learning_gains.csv"}
        )

    return {
        "export_date": datetime.utcnow().isoformat(),
        "filters": {
            "unit_id": unit_id, "class_id": class_id, "time_range": time_range.value,
        },
        "data": data,
    }
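
A quick way to exercise the export endpoint above; the path follows from the /api/analytics prefix in the barrel module, while the host is a placeholder:

import httpx

# format=csv downloads a CSV file; the default (json) returns a JSON envelope.
r = httpx.get(
    "http://localhost:8000/api/analytics/export/learning-gains",
    params={"format": "csv", "time_range": "month"},
)
open("learning_gains.csv", "wb").write(r.content)
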

@router.get("/export/misconceptions")
async def export_misconceptions(
    class_id: Optional[str] = Query(None),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export misconception data for further analysis.
    """
    # Import here to avoid circular dependency
    from unit_analytics_routes import get_misconception_report

    report = await get_misconception_report(
        class_id=class_id, unit_id=None,
        time_range=TimeRange.MONTH, limit=100
    )

    if format == ExportFormat.CSV:
        csv_content = "concept_id,concept_label,misconception,frequency,unit_id,stop_id\n"
        for m in report.most_common:
            csv_content += f'"{m.concept_id}","{m.concept_label}","{m.misconception_text}",{m.frequency},"{m.unit_id}","{m.stop_id}"\n'

        return Response(
            content=csv_content,
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=misconceptions.csv"}
        )

    return {
        "export_date": datetime.utcnow().isoformat(),
        "class_id": class_id,
        "total_entries": len(report.most_common),
        "data": [m.model_dump() for m in report.most_common],
    }


# ==============================================
# API Endpoints - Dashboard Aggregates
# ==============================================

@router.get("/dashboard/overview")
async def get_analytics_overview(
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> Dict[str, Any]:
    """
    Get high-level analytics overview for dashboard.
    """
    db = await get_analytics_database()

    if db:
        try:
            overview = await db.get_analytics_overview(time_range.value)
            return overview
        except Exception as e:
            logger.error(f"Failed to get analytics overview: {e}")

    return {
        "time_range": time_range.value,
        "total_sessions": 0,
        "unique_students": 0,
        "avg_completion_rate": 0.0,
        "avg_learning_gain": 0.0,
        "most_played_units": [],
        "struggling_concepts": [],
        "active_classes": 0,
    }


@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for analytics API."""
    db = await get_analytics_database()
    return {
        "status": "healthy",
        "service": "unit-analytics",
        "database": "connected" if db else "disconnected",
    }
@@ -1,97 +1,4 @@
# Backward-compat shim -- module moved to units/analytics_helpers.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.analytics_helpers")

"""
Unit Analytics API - Helpers.

Database access, statistical computation, and utility functions.
"""

import os
import logging
from typing import List, Dict, Optional

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"

# Database singleton
_analytics_db = None


async def get_analytics_database():
    """Get analytics database instance."""
    global _analytics_db
    if not USE_DATABASE:
        return None
    if _analytics_db is None:
        try:
            from unit.database import get_analytics_db
            _analytics_db = await get_analytics_db()
            logger.info("Analytics database initialized")
        except ImportError:
            logger.warning("Analytics database module not available")
        except Exception as e:
            logger.warning(f"Analytics database not available: {e}")
    return _analytics_db


def calculate_gain_distribution(gains: List[float]) -> Dict[str, int]:
    """Calculate distribution of learning gains into buckets."""
    distribution = {
        "< -20%": 0,
        "-20% to -10%": 0,
        "-10% to 0%": 0,
        "0% to 10%": 0,
        "10% to 20%": 0,
        "> 20%": 0,
    }

    for gain in gains:
        gain_percent = gain * 100
        if gain_percent < -20:
            distribution["< -20%"] += 1
        elif gain_percent < -10:
            distribution["-20% to -10%"] += 1
        elif gain_percent < 0:
            distribution["-10% to 0%"] += 1
        elif gain_percent < 10:
            distribution["0% to 10%"] += 1
        elif gain_percent < 20:
            distribution["10% to 20%"] += 1
        else:
            distribution["> 20%"] += 1

    return distribution


def calculate_trend(scores: List[float]) -> str:
    """Calculate trend from a series of scores."""
    if len(scores) < 3:
        return "insufficient_data"

    # Simple linear regression
    n = len(scores)
    x_mean = (n - 1) / 2
    y_mean = sum(scores) / n

    numerator = sum((i - x_mean) * (scores[i] - y_mean) for i in range(n))
    denominator = sum((i - x_mean) ** 2 for i in range(n))

    if denominator == 0:
        return "stable"

    slope = numerator / denominator

    if slope > 0.05:
        return "improving"
    elif slope < -0.05:
        return "declining"
    else:
        return "stable"
|
|
||||||
|
|
||||||
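# Worked example (not part of the commit): scores [0.4, 0.55, 0.8] give
# x_mean = 1, y_mean ~ 0.583, numerator = 0.4, denominator = 2,
# so slope = 0.2 > 0.05 -> "improving".
assert calculate_trend([0.4, 0.55, 0.8]) == "improving"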

def calculate_difficulty_rating(success_rate: float, avg_attempts: float) -> float:
    """Calculate difficulty rating 1-5 based on success metrics."""
    # Lower success rate and higher attempts = higher difficulty
    base_difficulty = (1 - success_rate) * 3 + 1  # 1-4 range
    attempt_modifier = min(avg_attempts - 1, 1)  # 0-1 range
    return min(5.0, base_difficulty + attempt_modifier)
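# Worked example (not part of the commit): success_rate=0.4, avg_attempts=2.5
# -> base = (1 - 0.4) * 3 + 1 = 2.8, modifier = min(1.5, 1) = 1.0, rating 3.8.
assert abs(calculate_difficulty_rating(0.4, 2.5) - 3.8) < 1e-9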
@@ -1,127 +1,4 @@
# Backward-compat shim -- module moved to units/analytics_models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.analytics_models")

"""
Unit Analytics API - Pydantic Models.

Data models for learning gains, stop performance, misconceptions,
student progress, class comparison, and export.
"""

from typing import List, Optional, Dict, Any
from datetime import datetime
from enum import Enum

from pydantic import BaseModel, Field


class TimeRange(str, Enum):
    """Time range for analytics queries"""
    WEEK = "week"
    MONTH = "month"
    QUARTER = "quarter"
    ALL = "all"


class LearningGainData(BaseModel):
    """Pre/Post learning gain data point"""
    student_id: str
    student_name: str
    unit_id: str
    precheck_score: float
    postcheck_score: float
    learning_gain: float
    percentile: Optional[float] = None


class LearningGainSummary(BaseModel):
    """Aggregated learning gain statistics"""
    unit_id: str
    unit_title: str
    total_students: int
    avg_precheck: float
    avg_postcheck: float
    avg_gain: float
    median_gain: float
    std_deviation: float
    positive_gain_count: int
    negative_gain_count: int
    no_change_count: int
    gain_distribution: Dict[str, int]
    individual_gains: List[LearningGainData]


class StopPerformance(BaseModel):
    """Performance data for a single stop"""
    stop_id: str
    stop_label: str
    attempts_total: int
    success_rate: float
    avg_time_seconds: float
    avg_attempts_before_success: float
    common_errors: List[str]
    difficulty_rating: float  # 1-5 based on performance


class UnitPerformanceDetail(BaseModel):
    """Detailed unit performance breakdown"""
    unit_id: str
    unit_title: str
    template: str
    total_sessions: int
    completed_sessions: int
    completion_rate: float
    avg_duration_minutes: float
    stops: List[StopPerformance]
    bottleneck_stops: List[str]  # Stops where students struggle most


class MisconceptionEntry(BaseModel):
    """Individual misconception tracking"""
    concept_id: str
    concept_label: str
    misconception_text: str
    frequency: int
    affected_student_ids: List[str]
    unit_id: str
    stop_id: str
    detected_via: str  # "precheck", "postcheck", "interaction"
    first_detected: datetime
    last_detected: datetime


class MisconceptionReport(BaseModel):
    """Comprehensive misconception report"""
    class_id: Optional[str]
    time_range: str
    total_misconceptions: int
    unique_concepts: int
    most_common: List[MisconceptionEntry]
    by_unit: Dict[str, List[MisconceptionEntry]]
    trending_up: List[MisconceptionEntry]  # Getting more frequent
    resolved: List[MisconceptionEntry]  # No longer appearing


class StudentProgressTimeline(BaseModel):
    """Timeline of student progress"""
    student_id: str
    student_name: str
    units_completed: int
    total_time_minutes: int
    avg_score: float
    trend: str  # "improving", "stable", "declining"
    timeline: List[Dict[str, Any]]  # List of session events


class ClassComparisonData(BaseModel):
    """Data for comparing class performance"""
    class_id: str
    class_name: str
    student_count: int
    units_assigned: int
    avg_completion_rate: float
    avg_learning_gain: float
    avg_time_per_unit: float


class ExportFormat(str, Enum):
    """Export format options"""
    JSON = "json"
    CSV = "csv"
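# Illustrative aside (not part of the commit): a data point as the routes
# construct it; all values below are made up.
point = LearningGainData(student_id="s-001", student_name="Mia",
                         unit_id="frac_01", precheck_score=0.4,
                         postcheck_score=0.7, learning_gain=0.3)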
@@ -1,394 +1,4 @@
# Backward-compat shim -- module moved to units/analytics_routes.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.analytics_routes")

"""
Unit Analytics API - Routes.

All API endpoints for learning gain, stop-level, misconception,
student timeline, class comparison, export, and dashboard analytics.
"""

import logging
import statistics
from datetime import datetime
from typing import Optional, Dict, Any, List

from fastapi import APIRouter, Query

from unit_analytics_models import (
    TimeRange,
    LearningGainData,
    LearningGainSummary,
    StopPerformance,
    UnitPerformanceDetail,
    MisconceptionEntry,
    MisconceptionReport,
    StudentProgressTimeline,
    ClassComparisonData,
)
from unit_analytics_helpers import (
    get_analytics_database,
    calculate_gain_distribution,
    calculate_trend,
    calculate_difficulty_rating,
)

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Unit Analytics"])


# ==============================================
# API Endpoints - Learning Gain
# ==============================================

# NOTE: Static routes must come BEFORE dynamic routes like /{unit_id}
@router.get("/learning-gain/compare")
async def compare_learning_gains(
    unit_ids: str = Query(..., description="Comma-separated unit IDs"),
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> Dict[str, Any]:
    """
    Compare learning gains across multiple units.
    """
    unit_list = [u.strip() for u in unit_ids.split(",")]
    comparisons = []

    for unit_id in unit_list:
        try:
            summary = await get_learning_gain_analysis(unit_id, class_id, time_range)
            comparisons.append({
                "unit_id": unit_id,
                "avg_gain": summary.avg_gain,
                "median_gain": summary.median_gain,
                "total_students": summary.total_students,
                "positive_rate": summary.positive_gain_count / max(summary.total_students, 1),
            })
        except Exception as e:
            logger.error(f"Failed to get comparison for {unit_id}: {e}")

    return {
        "time_range": time_range.value,
        "class_id": class_id,
        "comparisons": sorted(comparisons, key=lambda x: x["avg_gain"], reverse=True),
    }
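# Illustrative request (not part of the commit; unit IDs are made up and the
# path prefix depends on where this router is mounted):
#   GET /learning-gain/compare?unit_ids=frac_01,frac_02&time_range=month
# -> {"time_range": "month", "class_id": null, "comparisons": [...]}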
@router.get("/learning-gain/{unit_id}", response_model=LearningGainSummary)
|
|
||||||
async def get_learning_gain_analysis(
|
|
||||||
unit_id: str,
|
|
||||||
class_id: Optional[str] = Query(None, description="Filter by class"),
|
|
||||||
time_range: TimeRange = Query(TimeRange.MONTH, description="Time range for analysis"),
|
|
||||||
) -> LearningGainSummary:
|
|
||||||
"""
|
|
||||||
Get detailed pre/post learning gain analysis for a unit.
|
|
||||||
"""
|
|
||||||
db = await get_analytics_database()
|
|
||||||
individual_gains = []
|
|
||||||
|
|
||||||
if db:
|
|
||||||
try:
|
|
||||||
sessions = await db.get_unit_sessions_with_scores(
|
|
||||||
unit_id=unit_id,
|
|
||||||
class_id=class_id,
|
|
||||||
time_range=time_range.value
|
|
||||||
)
|
|
||||||
|
|
||||||
for session in sessions:
|
|
||||||
if session.get("precheck_score") is not None and session.get("postcheck_score") is not None:
|
|
||||||
gain = session["postcheck_score"] - session["precheck_score"]
|
|
||||||
individual_gains.append(LearningGainData(
|
|
||||||
student_id=session["student_id"],
|
|
||||||
student_name=session.get("student_name", session["student_id"][:8]),
|
|
||||||
unit_id=unit_id,
|
|
||||||
precheck_score=session["precheck_score"],
|
|
||||||
postcheck_score=session["postcheck_score"],
|
|
||||||
learning_gain=gain,
|
|
||||||
))
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get learning gain data: {e}")
|
|
||||||
|
|
||||||
# Calculate statistics
|
|
||||||
if not individual_gains:
|
|
||||||
return LearningGainSummary(
|
|
||||||
unit_id=unit_id,
|
|
||||||
unit_title=f"Unit {unit_id}",
|
|
||||||
total_students=0,
|
|
||||||
avg_precheck=0.0, avg_postcheck=0.0,
|
|
||||||
avg_gain=0.0, median_gain=0.0, std_deviation=0.0,
|
|
||||||
positive_gain_count=0, negative_gain_count=0, no_change_count=0,
|
|
||||||
gain_distribution={}, individual_gains=[],
|
|
||||||
)
|
|
||||||
|
|
||||||
gains = [g.learning_gain for g in individual_gains]
|
|
||||||
prechecks = [g.precheck_score for g in individual_gains]
|
|
||||||
postchecks = [g.postcheck_score for g in individual_gains]
|
|
||||||
|
|
||||||
avg_gain = statistics.mean(gains)
|
|
||||||
median_gain = statistics.median(gains)
|
|
||||||
std_dev = statistics.stdev(gains) if len(gains) > 1 else 0.0
|
|
||||||
|
|
||||||
# Calculate percentiles
|
|
||||||
sorted_gains = sorted(gains)
|
|
||||||
for data in individual_gains:
|
|
||||||
rank = sorted_gains.index(data.learning_gain) + 1
|
|
||||||
data.percentile = rank / len(sorted_gains) * 100
|
|
||||||
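    # Note (added for clarity): list.index() returns the first occurrence, so
    # students with tied gains share the lowest rank; acceptable for
    # display-only percentiles.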

    return LearningGainSummary(
        unit_id=unit_id,
        unit_title=f"Unit {unit_id}",
        total_students=len(individual_gains),
        avg_precheck=statistics.mean(prechecks),
        avg_postcheck=statistics.mean(postchecks),
        avg_gain=avg_gain,
        median_gain=median_gain,
        std_deviation=std_dev,
        positive_gain_count=sum(1 for g in gains if g > 0.01),
        negative_gain_count=sum(1 for g in gains if g < -0.01),
        no_change_count=sum(1 for g in gains if -0.01 <= g <= 0.01),
        gain_distribution=calculate_gain_distribution(gains),
        individual_gains=sorted(individual_gains, key=lambda x: x.learning_gain, reverse=True),
    )


# ==============================================
# API Endpoints - Stop-Level Analytics
# ==============================================

@router.get("/unit/{unit_id}/stops", response_model=UnitPerformanceDetail)
async def get_unit_stop_analytics(
    unit_id: str,
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> UnitPerformanceDetail:
    """
    Get detailed stop-level performance analytics.
    """
    db = await get_analytics_database()
    stops_data = []

    if db:
        try:
            stop_stats = await db.get_stop_performance(
                unit_id=unit_id, class_id=class_id, time_range=time_range.value
            )

            for stop in stop_stats:
                difficulty = calculate_difficulty_rating(
                    stop.get("success_rate", 0.5),
                    stop.get("avg_attempts", 1.0)
                )
                stops_data.append(StopPerformance(
                    stop_id=stop["stop_id"],
                    stop_label=stop.get("stop_label", stop["stop_id"]),
                    attempts_total=stop.get("total_attempts", 0),
                    success_rate=stop.get("success_rate", 0.0),
                    avg_time_seconds=stop.get("avg_time_seconds", 0.0),
                    avg_attempts_before_success=stop.get("avg_attempts", 1.0),
                    common_errors=stop.get("common_errors", []),
                    difficulty_rating=difficulty,
                ))

            unit_stats = await db.get_unit_overall_stats(unit_id, class_id, time_range.value)
        except Exception as e:
            logger.error(f"Failed to get stop analytics: {e}")
            unit_stats = {}
    else:
        unit_stats = {}

    # Identify bottleneck stops
    bottlenecks = [
        s.stop_id for s in stops_data
        if s.difficulty_rating > 3.5 or s.success_rate < 0.6
    ]

    return UnitPerformanceDetail(
        unit_id=unit_id,
        unit_title=f"Unit {unit_id}",
        template=unit_stats.get("template", "unknown"),
        total_sessions=unit_stats.get("total_sessions", 0),
        completed_sessions=unit_stats.get("completed_sessions", 0),
        completion_rate=unit_stats.get("completion_rate", 0.0),
        avg_duration_minutes=unit_stats.get("avg_duration_minutes", 0.0),
        stops=stops_data,
        bottleneck_stops=bottlenecks,
    )
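# Worked example for the bottleneck rule above (not part of the commit):
# success_rate=0.55 and avg_attempts=2.0 give difficulty
# (1 - 0.55) * 3 + 1 + min(1.0, 1) = 3.35, below the 3.5 cutoff, but
# success_rate < 0.6 still flags the stop as a bottleneck.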

# ==============================================
# API Endpoints - Misconception Tracking
# ==============================================

@router.get("/misconceptions", response_model=MisconceptionReport)
async def get_misconception_report(
    class_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
    limit: int = Query(20, ge=1, le=100),
) -> MisconceptionReport:
    """
    Get comprehensive misconception report.
    """
    db = await get_analytics_database()
    misconceptions = []

    if db:
        try:
            raw_misconceptions = await db.get_misconceptions(
                class_id=class_id, unit_id=unit_id,
                time_range=time_range.value, limit=limit
            )

            for m in raw_misconceptions:
                misconceptions.append(MisconceptionEntry(
                    concept_id=m["concept_id"],
                    concept_label=m["concept_label"],
                    misconception_text=m["misconception_text"],
                    frequency=m["frequency"],
                    affected_student_ids=m.get("student_ids", []),
                    unit_id=m["unit_id"],
                    stop_id=m["stop_id"],
                    detected_via=m.get("detected_via", "unknown"),
                    first_detected=m.get("first_detected", datetime.utcnow()),
                    last_detected=m.get("last_detected", datetime.utcnow()),
                ))
        except Exception as e:
            logger.error(f"Failed to get misconceptions: {e}")

    # Group by unit
    by_unit = {}
    for m in misconceptions:
        if m.unit_id not in by_unit:
            by_unit[m.unit_id] = []
        by_unit[m.unit_id].append(m)

    trending_up = misconceptions[:3] if misconceptions else []
    resolved = []

    return MisconceptionReport(
        class_id=class_id,
        time_range=time_range.value,
        total_misconceptions=sum(m.frequency for m in misconceptions),
        unique_concepts=len(set(m.concept_id for m in misconceptions)),
        most_common=sorted(misconceptions, key=lambda x: x.frequency, reverse=True)[:10],
        by_unit=by_unit,
        trending_up=trending_up,
        resolved=resolved,
    )
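# Illustrative request (not part of the commit; the class ID is made up):
#   GET /misconceptions?class_id=7b&time_range=quarter&limit=10
# -> MisconceptionReport with most_common sorted by frequency.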
@router.get("/misconceptions/student/{student_id}")
|
|
||||||
async def get_student_misconceptions(
|
|
||||||
student_id: str,
|
|
||||||
time_range: TimeRange = Query(TimeRange.ALL),
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Get misconceptions for a specific student.
|
|
||||||
"""
|
|
||||||
db = await get_analytics_database()
|
|
||||||
|
|
||||||
if db:
|
|
||||||
try:
|
|
||||||
misconceptions = await db.get_student_misconceptions(
|
|
||||||
student_id=student_id, time_range=time_range.value
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
"student_id": student_id,
|
|
||||||
"misconceptions": misconceptions,
|
|
||||||
"recommended_remediation": [
|
|
||||||
{"concept": m["concept_label"], "activity": f"Review {m['unit_id']}/{m['stop_id']}"}
|
|
||||||
for m in misconceptions[:5]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get student misconceptions: {e}")
|
|
||||||
|
|
||||||
return {
|
|
||||||
"student_id": student_id,
|
|
||||||
"misconceptions": [],
|
|
||||||
"recommended_remediation": [],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# ==============================================
|
|
||||||
# API Endpoints - Student Progress Timeline
|
|
||||||
# ==============================================
|
|
||||||
|
|
||||||
@router.get("/student/{student_id}/timeline", response_model=StudentProgressTimeline)
|
|
||||||
async def get_student_timeline(
|
|
||||||
student_id: str,
|
|
||||||
time_range: TimeRange = Query(TimeRange.ALL),
|
|
||||||
) -> StudentProgressTimeline:
|
|
||||||
"""
|
|
||||||
Get detailed progress timeline for a student.
|
|
||||||
"""
|
|
||||||
db = await get_analytics_database()
|
|
||||||
timeline = []
|
|
||||||
scores = []
|
|
||||||
|
|
||||||
if db:
|
|
||||||
try:
|
|
||||||
sessions = await db.get_student_sessions(
|
|
||||||
student_id=student_id, time_range=time_range.value
|
|
||||||
)
|
|
||||||
|
|
||||||
for session in sessions:
|
|
||||||
timeline.append({
|
|
||||||
"date": session.get("started_at"),
|
|
||||||
"unit_id": session.get("unit_id"),
|
|
||||||
"completed": session.get("completed_at") is not None,
|
|
||||||
"precheck": session.get("precheck_score"),
|
|
||||||
"postcheck": session.get("postcheck_score"),
|
|
||||||
"duration_minutes": session.get("duration_seconds", 0) // 60,
|
|
||||||
})
|
|
||||||
if session.get("postcheck_score") is not None:
|
|
||||||
scores.append(session["postcheck_score"])
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get student timeline: {e}")
|
|
||||||
|
|
||||||
trend = calculate_trend(scores) if scores else "insufficient_data"
|
|
||||||
|
|
||||||
return StudentProgressTimeline(
|
|
||||||
student_id=student_id,
|
|
||||||
student_name=f"Student {student_id[:8]}",
|
|
||||||
units_completed=sum(1 for t in timeline if t["completed"]),
|
|
||||||
total_time_minutes=sum(t["duration_minutes"] for t in timeline),
|
|
||||||
avg_score=statistics.mean(scores) if scores else 0.0,
|
|
||||||
trend=trend,
|
|
||||||
timeline=timeline,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# ==============================================
|
|
||||||
# API Endpoints - Class Comparison
|
|
||||||
# ==============================================
|
|
||||||
|
|
||||||
@router.get("/compare/classes", response_model=List[ClassComparisonData])
|
|
||||||
async def compare_classes(
|
|
||||||
class_ids: str = Query(..., description="Comma-separated class IDs"),
|
|
||||||
time_range: TimeRange = Query(TimeRange.MONTH),
|
|
||||||
) -> List[ClassComparisonData]:
|
|
||||||
"""
|
|
||||||
Compare performance across multiple classes.
|
|
||||||
"""
|
|
||||||
class_list = [c.strip() for c in class_ids.split(",")]
|
|
||||||
comparisons = []
|
|
||||||
|
|
||||||
db = await get_analytics_database()
|
|
||||||
if db:
|
|
||||||
for class_id in class_list:
|
|
||||||
try:
|
|
||||||
stats = await db.get_class_aggregate_stats(class_id, time_range.value)
|
|
||||||
comparisons.append(ClassComparisonData(
|
|
||||||
class_id=class_id,
|
|
||||||
class_name=stats.get("class_name", f"Klasse {class_id[:8]}"),
|
|
||||||
student_count=stats.get("student_count", 0),
|
|
||||||
units_assigned=stats.get("units_assigned", 0),
|
|
||||||
avg_completion_rate=stats.get("avg_completion_rate", 0.0),
|
|
||||||
avg_learning_gain=stats.get("avg_learning_gain", 0.0),
|
|
||||||
avg_time_per_unit=stats.get("avg_time_per_unit", 0.0),
|
|
||||||
))
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get stats for class {class_id}: {e}")
|
|
||||||
|
|
||||||
return sorted(comparisons, key=lambda x: x.avg_learning_gain, reverse=True)
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,57 +1,4 @@
# Backward-compat shim -- module moved to units/api.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.api")

# ==============================================
# Breakpilot Drive - Unit API (barrel re-export)
# ==============================================
# This module was split into:
# - unit_models.py (Pydantic models)
# - unit_helpers.py (Auth, DB, token, validation helpers)
# - unit_routes.py (Definition, session, analytics routes)
# - unit_content_routes.py (H5P, worksheet, PDF routes)
#
# The `router` object is assembled here by including all sub-routers.
# Importers that did `from unit_api import router` continue to work.

from fastapi import APIRouter

from unit_routes import router as _routes_router
from unit_definition_routes import router as _definition_router
from unit_content_routes import router as _content_router

# Re-export models for any direct importers
from unit_models import (  # noqa: F401
    UnitDefinitionResponse,
    CreateSessionRequest,
    SessionResponse,
    TelemetryEvent,
    TelemetryPayload,
    TelemetryResponse,
    PostcheckAnswer,
    CompleteSessionRequest,
    SessionSummaryResponse,
    UnitListItem,
    RecommendedUnit,
    CreateUnitRequest,
    UpdateUnitRequest,
    ValidationError,
    ValidationResult,
)

# Re-export helpers for any direct importers
from unit_helpers import (  # noqa: F401
    get_optional_current_user,
    get_unit_database,
    create_session_token,
    verify_session_token,
    get_session_from_token,
    validate_unit_definition,
    USE_DATABASE,
    REQUIRE_AUTH,
    SECRET_KEY,
)

# Assemble the combined router.
# _routes_router and _content_router both use prefix="/api/units",
# so we create a plain router and include them without extra prefix.
router = APIRouter()
router.include_router(_routes_router)
router.include_router(_definition_router)
router.include_router(_content_router)
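# Illustrative aside (not part of the commit): the barrel keeps legacy imports
# working once the combined router is mounted in an app.
from fastapi import FastAPI
from unit_api import router  # old path, still valid via the shim

app = FastAPI()
app.include_router(router)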
@@ -1,160 +1,4 @@
# Backward-compat shim -- module moved to units/content_routes.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.content_routes")

# ==============================================
# Breakpilot Drive - Unit Content Generation Routes
# ==============================================
# API endpoints for H5P content, worksheets, and PDF generation.
# Extracted from unit_api.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import Optional, Dict, Any
import logging

from unit_models import UnitDefinitionResponse
from unit_helpers import get_optional_current_user, get_unit_database

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])


@router.get("/content/{unit_id}/h5p")
async def generate_h5p_content(
    unit_id: str,
    locale: str = Query("de-DE", description="Target locale"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Generate H5P content items for a unit.

    Returns H5P-compatible content structures for:
    - Drag and Drop (vocabulary matching)
    - Fill in the Blanks (concept texts)
    - Multiple Choice (misconception targeting)
    """
    from content_generators import generate_h5p_for_unit, H5PGenerator, generate_h5p_manifest

    # Get unit definition
    db = await get_unit_database()
    unit_def = None

    if db:
        try:
            unit = await db.get_unit_definition(unit_id)
            if unit:
                unit_def = unit.get("definition", {})
        except Exception as e:
            logger.error(f"Failed to get unit for H5P generation: {e}")

    if not unit_def:
        raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")

    try:
        generator = H5PGenerator(locale=locale)
        contents = generator.generate_from_unit(unit_def)
        manifest = generate_h5p_manifest(contents, unit_id)

        return {
            "unit_id": unit_id,
            "locale": locale,
            "generated_count": len(contents),
            "manifest": manifest,
            "contents": [c.to_h5p_structure() for c in contents]
        }
    except Exception as e:
        logger.error(f"H5P generation failed: {e}")
        raise HTTPException(status_code=500, detail=f"H5P generation failed: {str(e)}")
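# Illustrative request (not part of the commit; the unit ID is made up):
#   GET /api/units/content/frac_01/h5p?locale=de-DE
# -> {"unit_id": "frac_01", "generated_count": ..., "manifest": {...}, ...}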
@router.get("/content/{unit_id}/worksheet")
|
|
||||||
async def generate_worksheet_html(
|
|
||||||
unit_id: str,
|
|
||||||
locale: str = Query("de-DE", description="Target locale"),
|
|
||||||
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Generate worksheet HTML for a unit.
|
|
||||||
|
|
||||||
Returns HTML that can be:
|
|
||||||
- Displayed in browser
|
|
||||||
- Converted to PDF using weasyprint
|
|
||||||
- Printed directly
|
|
||||||
"""
|
|
||||||
from content_generators import PDFGenerator
|
|
||||||
|
|
||||||
# Get unit definition
|
|
||||||
db = await get_unit_database()
|
|
||||||
unit_def = None
|
|
||||||
|
|
||||||
if db:
|
|
||||||
try:
|
|
||||||
unit = await db.get_unit_definition(unit_id)
|
|
||||||
if unit:
|
|
||||||
unit_def = unit.get("definition", {})
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get unit for worksheet generation: {e}")
|
|
||||||
|
|
||||||
if not unit_def:
|
|
||||||
raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
generator = PDFGenerator(locale=locale)
|
|
||||||
worksheet = generator.generate_from_unit(unit_def)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"unit_id": unit_id,
|
|
||||||
"locale": locale,
|
|
||||||
"title": worksheet.title,
|
|
||||||
"sections": len(worksheet.sections),
|
|
||||||
"html": worksheet.to_html()
|
|
||||||
}
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Worksheet generation failed: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=f"Worksheet generation failed: {str(e)}")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/content/{unit_id}/worksheet.pdf")
|
|
||||||
async def download_worksheet_pdf(
|
|
||||||
unit_id: str,
|
|
||||||
locale: str = Query("de-DE", description="Target locale"),
|
|
||||||
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Generate and download worksheet as PDF.
|
|
||||||
|
|
||||||
Requires weasyprint to be installed on the server.
|
|
||||||
"""
|
|
||||||
from fastapi.responses import Response
|
|
||||||
|
|
||||||
# Get unit definition
|
|
||||||
db = await get_unit_database()
|
|
||||||
unit_def = None
|
|
||||||
|
|
||||||
if db:
|
|
||||||
try:
|
|
||||||
unit = await db.get_unit_definition(unit_id)
|
|
||||||
if unit:
|
|
||||||
unit_def = unit.get("definition", {})
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to get unit for PDF generation: {e}")
|
|
||||||
|
|
||||||
if not unit_def:
|
|
||||||
raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
from content_generators import generate_worksheet_pdf
|
|
||||||
pdf_bytes = generate_worksheet_pdf(unit_def, locale)
|
|
||||||
|
|
||||||
return Response(
|
|
||||||
content=pdf_bytes,
|
|
||||||
media_type="application/pdf",
|
|
||||||
headers={
|
|
||||||
"Content-Disposition": f'attachment; filename="{unit_id}_worksheet.pdf"'
|
|
||||||
}
|
|
||||||
)
|
|
||||||
except ImportError:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=501,
|
|
||||||
detail="PDF generation not available. Install weasyprint: pip install weasyprint"
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"PDF generation failed: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=f"PDF generation failed: {str(e)}")
|
|
||||||
|
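# Illustrative download (not part of the commit; host and unit ID are made up):
#   curl -o worksheet.pdf \
#     "http://localhost:8000/api/units/content/frac_01/worksheet.pdf?locale=de-DE"
# Returns 501 with an install hint when weasyprint is missing.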
@@ -1,301 +1,4 @@
# Backward-compat shim -- module moved to units/definition_routes.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.definition_routes")

# ==============================================
# Breakpilot Drive - Unit Definition CRUD Routes
# ==============================================
# Endpoints for creating, updating, deleting, and validating
# unit definitions. Extracted from unit_routes.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import Optional, Dict, Any
from datetime import datetime
import logging

from unit_models import (
    UnitDefinitionResponse,
    CreateUnitRequest,
    UpdateUnitRequest,
    ValidationResult,
)
from unit_helpers import (
    get_optional_current_user,
    get_unit_database,
    validate_unit_definition,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])


@router.post("/definitions", response_model=UnitDefinitionResponse)
async def create_unit_definition(
    request_data: CreateUnitRequest,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> UnitDefinitionResponse:
    """
    Create a new unit definition.

    - Validates unit structure
    - Saves to database or JSON file
    - Returns created unit
    """
    import json
    from pathlib import Path

    # Build full definition
    definition = {
        "unit_id": request_data.unit_id,
        "template": request_data.template,
        "version": request_data.version,
        "locale": request_data.locale,
        "grade_band": request_data.grade_band,
        "duration_minutes": request_data.duration_minutes,
        "difficulty": request_data.difficulty,
        "subject": request_data.subject,
        "topic": request_data.topic,
        "learning_objectives": request_data.learning_objectives,
        "stops": request_data.stops,
        "precheck": request_data.precheck or {
            "question_set_id": f"{request_data.unit_id}_precheck",
            "required": True,
            "time_limit_seconds": 120
        },
        "postcheck": request_data.postcheck or {
            "question_set_id": f"{request_data.unit_id}_postcheck",
            "required": True,
            "time_limit_seconds": 180
        },
        "teacher_controls": request_data.teacher_controls or {
            "allow_skip": True,
            "allow_replay": True,
            "max_time_per_stop_sec": 90,
            "show_hints": True,
            "require_precheck": True,
            "require_postcheck": True
        },
        "assets": request_data.assets or {},
        "metadata": request_data.metadata or {
            "author": user.get("email", "Unknown") if user else "Unknown",
            "created": datetime.utcnow().isoformat(),
            "curriculum_reference": ""
        }
    }

    # Validate
    validation = validate_unit_definition(definition)
    if not validation.valid:
        error_msgs = [f"{e.field}: {e.message}" for e in validation.errors]
        raise HTTPException(status_code=400, detail=f"Validierung fehlgeschlagen: {'; '.join(error_msgs)}")

    # Check if unit_id already exists
    db = await get_unit_database()
    if db:
        try:
            existing = await db.get_unit_definition(request_data.unit_id)
            if existing:
                raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")

            # Save to database
            await db.create_unit_definition(
                unit_id=request_data.unit_id,
                template=request_data.template,
                version=request_data.version,
                locale=request_data.locale,
                grade_band=request_data.grade_band,
                duration_minutes=request_data.duration_minutes,
                difficulty=request_data.difficulty,
                definition=definition,
                status=request_data.status
            )
            logger.info(f"Unit created in database: {request_data.unit_id}")
        except HTTPException:
            raise
        except Exception as e:
            logger.warning(f"Database save failed, using JSON fallback: {e}")
            # Fallback to JSON
            units_dir = Path(__file__).parent / "data" / "units"
            units_dir.mkdir(parents=True, exist_ok=True)
            json_path = units_dir / f"{request_data.unit_id}.json"
            if json_path.exists():
                raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")
            with open(json_path, "w", encoding="utf-8") as f:
                json.dump(definition, f, ensure_ascii=False, indent=2)
            logger.info(f"Unit created as JSON: {json_path}")
    else:
        # JSON only mode
        units_dir = Path(__file__).parent / "data" / "units"
        units_dir.mkdir(parents=True, exist_ok=True)
        json_path = units_dir / f"{request_data.unit_id}.json"
        if json_path.exists():
            raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(definition, f, ensure_ascii=False, indent=2)
        logger.info(f"Unit created as JSON: {json_path}")

    return UnitDefinitionResponse(
        unit_id=request_data.unit_id,
        template=request_data.template,
        version=request_data.version,
        locale=request_data.locale,
        grade_band=request_data.grade_band,
        duration_minutes=request_data.duration_minutes,
        difficulty=request_data.difficulty,
        definition=definition
    )
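# Illustrative request body (not part of the commit; values are made up).
# precheck/postcheck/teacher_controls are filled with the defaults shown
# above when omitted:
#   POST /api/units/definitions
#   {"unit_id": "frac_01", "template": "flight_path",
#    "stops": [{"stop_id": "s1", "label": "Start",
#               "interaction": {"type": "drag_match"}}]}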
@router.put("/definitions/{unit_id}", response_model=UnitDefinitionResponse)
async def update_unit_definition(
    unit_id: str,
    request_data: UpdateUnitRequest,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> UnitDefinitionResponse:
    """
    Update an existing unit definition.

    - Merges updates with existing definition
    - Re-validates
    - Saves updated version
    """
    import json
    from pathlib import Path

    # Get existing unit
    db = await get_unit_database()
    existing = None

    if db:
        try:
            existing = await db.get_unit_definition(unit_id)
        except Exception as e:
            logger.warning(f"Database read failed: {e}")

    if not existing:
        # Try JSON file
        json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
        if json_path.exists():
            with open(json_path, "r", encoding="utf-8") as f:
                file_data = json.load(f)
            existing = {
                "unit_id": file_data.get("unit_id"),
                "template": file_data.get("template"),
                "version": file_data.get("version", "1.0.0"),
                "locale": file_data.get("locale", ["de-DE"]),
                "grade_band": file_data.get("grade_band", []),
                "duration_minutes": file_data.get("duration_minutes", 8),
                "difficulty": file_data.get("difficulty", "base"),
                "definition": file_data
            }

    if not existing:
        raise HTTPException(status_code=404, detail=f"Unit nicht gefunden: {unit_id}")

    # Merge updates into existing definition
    definition = existing.get("definition", {})
    update_dict = request_data.model_dump(exclude_unset=True)

    for key, value in update_dict.items():
        if value is not None:
            definition[key] = value

    # Validate updated definition
    validation = validate_unit_definition(definition)
    if not validation.valid:
        error_msgs = [f"{e.field}: {e.message}" for e in validation.errors]
        raise HTTPException(status_code=400, detail=f"Validierung fehlgeschlagen: {'; '.join(error_msgs)}")

    # Save
    if db:
        try:
            await db.update_unit_definition(
                unit_id=unit_id,
                version=definition.get("version"),
                locale=definition.get("locale"),
                grade_band=definition.get("grade_band"),
                duration_minutes=definition.get("duration_minutes"),
                difficulty=definition.get("difficulty"),
                definition=definition,
                status=update_dict.get("status")
            )
            logger.info(f"Unit updated in database: {unit_id}")
        except Exception as e:
            logger.warning(f"Database update failed, using JSON: {e}")
            json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
            with open(json_path, "w", encoding="utf-8") as f:
                json.dump(definition, f, ensure_ascii=False, indent=2)
    else:
        json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(definition, f, ensure_ascii=False, indent=2)
        logger.info(f"Unit updated as JSON: {json_path}")

    return UnitDefinitionResponse(
        unit_id=unit_id,
        template=definition.get("template", existing.get("template")),
        version=definition.get("version", existing.get("version", "1.0.0")),
        locale=definition.get("locale", existing.get("locale", ["de-DE"])),
        grade_band=definition.get("grade_band", existing.get("grade_band", [])),
        duration_minutes=definition.get("duration_minutes", existing.get("duration_minutes", 8)),
        difficulty=definition.get("difficulty", existing.get("difficulty", "base")),
        definition=definition
    )


@router.delete("/definitions/{unit_id}")
async def delete_unit_definition(
    unit_id: str,
    force: bool = Query(False, description="Force delete even if published"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Delete a unit definition.

    - By default, only drafts can be deleted
    - Use force=true to delete published units
    """
    from pathlib import Path

    db = await get_unit_database()
    deleted = False

    if db:
        try:
            existing = await db.get_unit_definition(unit_id)
            if existing:
                status = existing.get("status", "draft")
                if status == "published" and not force:
                    raise HTTPException(
                        status_code=400,
                        detail="Veroeffentlichte Units koennen nicht geloescht werden. Verwende force=true."
                    )
                await db.delete_unit_definition(unit_id)
                deleted = True
                logger.info(f"Unit deleted from database: {unit_id}")
        except HTTPException:
            raise
        except Exception as e:
            logger.warning(f"Database delete failed: {e}")

    # Also check JSON file
    json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
    if json_path.exists():
        json_path.unlink()
        deleted = True
        logger.info(f"Unit JSON deleted: {json_path}")

    if not deleted:
        raise HTTPException(status_code=404, detail=f"Unit nicht gefunden: {unit_id}")

    return {"success": True, "unit_id": unit_id, "message": "Unit geloescht"}


@router.post("/definitions/validate", response_model=ValidationResult)
async def validate_unit(
    unit_data: Dict[str, Any],
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> ValidationResult:
    """
    Validate a unit definition without saving.

    Returns validation result with errors and warnings.
    """
    return validate_unit_definition(unit_data)
@@ -1,204 +1,4 @@
# Backward-compat shim -- module moved to units/helpers.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.helpers")

# ==============================================
# Breakpilot Drive - Unit API Helpers
# ==============================================
# Auth, database, token, and validation helpers for the Unit API.
# Extracted from unit_api.py for file-size compliance.

from fastapi import HTTPException, Request
from typing import Optional, Dict, Any, List
from datetime import datetime, timedelta
import os
import logging
import jwt

from unit_models import ValidationError, ValidationResult

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
REQUIRE_AUTH = os.getenv("GAME_REQUIRE_AUTH", "false").lower() == "true"
SECRET_KEY = os.getenv("JWT_SECRET_KEY", "dev-secret-key-change-in-production")


# ==============================================
# Auth Dependency (reuse from game_api)
# ==============================================

async def get_optional_current_user(request: Request) -> Optional[Dict[str, Any]]:
    """Optional auth dependency for Unit API."""
    if not REQUIRE_AUTH:
        return None

    try:
        from auth import get_current_user
        return await get_current_user(request)
    except ImportError:
        logger.warning("Auth module not available")
        return None
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Auth error: {e}")
        raise HTTPException(status_code=401, detail="Authentication failed")


# ==============================================
# Database Integration
# ==============================================

_unit_db = None

async def get_unit_database():
    """Get unit database instance with lazy initialization."""
    global _unit_db
    if not USE_DATABASE:
        return None
    if _unit_db is None:
        try:
            from unit.database import get_unit_db
            _unit_db = await get_unit_db()
            logger.info("Unit database initialized")
        except ImportError:
            logger.warning("Unit database module not available")
        except Exception as e:
            logger.warning(f"Unit database not available: {e}")
    return _unit_db


# ==============================================
# Token Helpers
# ==============================================

def create_session_token(session_id: str, student_id: str, expires_hours: int = 4) -> str:
    """Create a JWT session token for telemetry authentication."""
    payload = {
        "session_id": session_id,
        "student_id": student_id,
        "exp": datetime.utcnow() + timedelta(hours=expires_hours),
        "iat": datetime.utcnow(),
    }
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


def verify_session_token(token: str) -> Optional[Dict[str, Any]]:
    """Verify a session token and return payload."""
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError:
        return None


async def get_session_from_token(request: Request) -> Optional[Dict[str, Any]]:
    """Extract and verify session from Authorization header."""
    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Bearer "):
        return None
    token = auth_header[7:]
    return verify_session_token(token)
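# Illustrative round trip (not part of the commit; IDs are made up); relies
# only on the token helpers above.
token = create_session_token("sess-123", "stud-456", expires_hours=1)
payload = verify_session_token(token)
assert payload is not None and payload["session_id"] == "sess-123"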
# ==============================================
# Validation
# ==============================================

def validate_unit_definition(unit_data: Dict[str, Any]) -> ValidationResult:
    """
    Validate a unit definition structure.

    Returns validation result with errors and warnings.
    """
    errors: List[ValidationError] = []
    warnings: List[ValidationError] = []

    # Required fields
    if not unit_data.get("unit_id"):
        errors.append(ValidationError(field="unit_id", message="unit_id ist erforderlich"))

    if not unit_data.get("template"):
        errors.append(ValidationError(field="template", message="template ist erforderlich"))
    elif unit_data["template"] not in ["flight_path", "station_loop"]:
        errors.append(ValidationError(
            field="template",
            message="template muss 'flight_path' oder 'station_loop' sein"
        ))

    # Validate stops
    stops = unit_data.get("stops", [])
    if not stops:
        errors.append(ValidationError(field="stops", message="Mindestens 1 Stop erforderlich"))
    else:
        # Check minimum stops for flight_path
        if unit_data.get("template") == "flight_path" and len(stops) < 3:
            warnings.append(ValidationError(
                field="stops",
                message="FlightPath sollte mindestens 3 Stops haben",
                severity="warning"
            ))

        # Validate each stop
        stop_ids = set()
        for i, stop in enumerate(stops):
            if not stop.get("stop_id"):
                errors.append(ValidationError(
                    field=f"stops[{i}].stop_id",
                    message=f"Stop {i}: stop_id fehlt"
                ))
            else:
                if stop["stop_id"] in stop_ids:
                    errors.append(ValidationError(
                        field=f"stops[{i}].stop_id",
                        message=f"Stop {i}: Doppelte stop_id '{stop['stop_id']}'"
                    ))
                stop_ids.add(stop["stop_id"])

            # Check interaction type
            interaction = stop.get("interaction", {})
            if not interaction.get("type"):
                errors.append(ValidationError(
                    field=f"stops[{i}].interaction.type",
                    message=f"Stop {stop.get('stop_id', i)}: Interaktionstyp fehlt"
                ))
            elif interaction["type"] not in [
                "aim_and_pass", "slider_adjust", "slider_equivalence",
                "sequence_arrange", "toggle_switch", "drag_match",
                "error_find", "transfer_apply"
            ]:
                warnings.append(ValidationError(
                    field=f"stops[{i}].interaction.type",
                    message=f"Stop {stop.get('stop_id', i)}: Unbekannter Interaktionstyp '{interaction['type']}'",
                    severity="warning"
                ))

            # Check for label
            if not stop.get("label"):
                warnings.append(ValidationError(
                    field=f"stops[{i}].label",
                    message=f"Stop {stop.get('stop_id', i)}: Label fehlt",
                    severity="warning"
                ))

    # Validate duration
    duration = unit_data.get("duration_minutes", 0)
    if duration < 3 or duration > 20:
        warnings.append(ValidationError(
            field="duration_minutes",
            message="Dauer sollte zwischen 3 und 20 Minuten liegen",
            severity="warning"
        ))

    # Validate difficulty
    if unit_data.get("difficulty") and unit_data["difficulty"] not in ["base", "advanced"]:
        warnings.append(ValidationError(
            field="difficulty",
            message="difficulty sollte 'base' oder 'advanced' sein",
            severity="warning"
        ))

    return ValidationResult(
        valid=len(errors) == 0,
        errors=errors,
        warnings=warnings
    )
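# Worked example (not part of the commit; the unit ID is made up): a one-stop
# flight_path unit passes validation but collects warnings (fewer than 3
# stops, missing label, duration outside 3-20).
result = validate_unit_definition({
    "unit_id": "frac_01", "template": "flight_path",
    "stops": [{"stop_id": "s1", "interaction": {"type": "drag_match"}}],
})
assert result.valid and len(result.warnings) == 3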
@@ -1,149 +1,4 @@
# Backward-compat shim -- module moved to units/models.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.models")

# ==============================================
# Breakpilot Drive - Unit API Models
# ==============================================
# Pydantic models for the Unit API.
# Extracted from unit_api.py for file-size compliance.

from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any
from datetime import datetime


class UnitDefinitionResponse(BaseModel):
    """Unit definition response"""
    unit_id: str
    template: str
    version: str
    locale: List[str]
    grade_band: List[str]
    duration_minutes: int
    difficulty: str
    definition: Dict[str, Any]


class CreateSessionRequest(BaseModel):
    """Request to create a unit session"""
    unit_id: str
    student_id: str
    locale: str = "de-DE"
    difficulty: str = "base"


class SessionResponse(BaseModel):
    """Response after creating a session"""
    session_id: str
    unit_definition_url: str
    session_token: str
    telemetry_endpoint: str
    expires_at: datetime


class TelemetryEvent(BaseModel):
    """Single telemetry event"""
    ts: Optional[str] = None
    type: str = Field(..., alias="type")
    stop_id: Optional[str] = None
    metrics: Optional[Dict[str, Any]] = None

    class Config:
        populate_by_name = True
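# Illustrative aside (not part of the commit): with populate_by_name the model
# accepts raw telemetry dicts as the client posts them; values are made up.
event = TelemetryEvent(**{"type": "stop_completed", "stop_id": "s1",
                          "metrics": {"duration_ms": 4200}})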
class TelemetryPayload(BaseModel):
|
|
||||||
"""Batch telemetry payload"""
|
|
||||||
session_id: str
|
|
||||||
events: List[TelemetryEvent]
|
|
||||||
|
|
||||||
|
|
||||||
class TelemetryResponse(BaseModel):
|
|
||||||
"""Response after receiving telemetry"""
|
|
||||||
accepted: int
|
|
||||||
|
|
||||||
|
|
||||||
class PostcheckAnswer(BaseModel):
|
|
||||||
"""Single postcheck answer"""
|
|
||||||
question_id: str
|
|
||||||
answer: str
|
|
||||||
|
|
||||||
|
|
||||||
class CompleteSessionRequest(BaseModel):
|
|
||||||
"""Request to complete a session"""
|
|
||||||
postcheck_answers: Optional[List[PostcheckAnswer]] = None
|
|
||||||
|
|
||||||
|
|
||||||
class SessionSummaryResponse(BaseModel):
|
|
||||||
"""Response with session summary"""
|
|
||||||
summary: Dict[str, Any]
|
|
||||||
next_recommendations: Dict[str, Any]
|
|
||||||
|
|
||||||
|
|
||||||
class UnitListItem(BaseModel):
|
|
||||||
"""Unit list item"""
|
|
||||||
unit_id: str
|
|
||||||
template: str
|
|
||||||
difficulty: str
|
|
||||||
duration_minutes: int
|
|
||||||
locale: List[str]
|
|
||||||
grade_band: List[str]
|
|
||||||
|
|
||||||
|
|
||||||
class RecommendedUnit(BaseModel):
|
|
||||||
"""Recommended unit with reason"""
|
|
||||||
unit_id: str
|
|
||||||
template: str
|
|
||||||
difficulty: str
|
|
||||||
reason: str
|
|
||||||
|
|
||||||
|
|
||||||
class CreateUnitRequest(BaseModel):
|
|
||||||
"""Request to create a new unit definition"""
|
|
||||||
unit_id: str = Field(..., description="Unique unit identifier")
|
|
||||||
template: str = Field(..., description="Template type: flight_path or station_loop")
|
|
||||||
version: str = Field(default="1.0.0", description="Version string")
|
|
||||||
locale: List[str] = Field(default=["de-DE"], description="Supported locales")
|
|
||||||
grade_band: List[str] = Field(default=["5", "6", "7"], description="Target grade levels")
|
|
||||||
duration_minutes: int = Field(default=8, ge=3, le=20, description="Expected duration")
|
|
||||||
difficulty: str = Field(default="base", description="Difficulty level: base or advanced")
|
|
||||||
subject: Optional[str] = Field(default=None, description="Subject area")
|
|
||||||
topic: Optional[str] = Field(default=None, description="Topic within subject")
|
|
||||||
learning_objectives: List[str] = Field(default=[], description="Learning objectives")
|
|
||||||
stops: List[Dict[str, Any]] = Field(default=[], description="Unit stops/stations")
|
|
||||||
precheck: Optional[Dict[str, Any]] = Field(default=None, description="Pre-check configuration")
|
|
||||||
postcheck: Optional[Dict[str, Any]] = Field(default=None, description="Post-check configuration")
|
|
||||||
teacher_controls: Optional[Dict[str, Any]] = Field(default=None, description="Teacher control settings")
|
|
||||||
assets: Optional[Dict[str, Any]] = Field(default=None, description="Asset configuration")
|
|
||||||
metadata: Optional[Dict[str, Any]] = Field(default=None, description="Additional metadata")
|
|
||||||
status: str = Field(default="draft", description="Publication status: draft or published")
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateUnitRequest(BaseModel):
|
|
||||||
"""Request to update an existing unit definition"""
|
|
||||||
version: Optional[str] = None
|
|
||||||
locale: Optional[List[str]] = None
|
|
||||||
grade_band: Optional[List[str]] = None
|
|
||||||
duration_minutes: Optional[int] = Field(default=None, ge=3, le=20)
|
|
||||||
difficulty: Optional[str] = None
|
|
||||||
subject: Optional[str] = None
|
|
||||||
topic: Optional[str] = None
|
|
||||||
learning_objectives: Optional[List[str]] = None
|
|
||||||
stops: Optional[List[Dict[str, Any]]] = None
|
|
||||||
precheck: Optional[Dict[str, Any]] = None
|
|
||||||
postcheck: Optional[Dict[str, Any]] = None
|
|
||||||
teacher_controls: Optional[Dict[str, Any]] = None
|
|
||||||
assets: Optional[Dict[str, Any]] = None
|
|
||||||
metadata: Optional[Dict[str, Any]] = None
|
|
||||||
status: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class ValidationError(BaseModel):
|
|
||||||
"""Single validation error"""
|
|
||||||
field: str
|
|
||||||
message: str
|
|
||||||
severity: str = "error" # error or warning
|
|
||||||
|
|
||||||
|
|
||||||
class ValidationResult(BaseModel):
|
|
||||||
"""Result of unit validation"""
|
|
||||||
valid: bool
|
|
||||||
errors: List[ValidationError] = []
|
|
||||||
warnings: List[ValidationError] = []
|
|
||||||
|
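A minimal usage sketch for the request/validation models above; the import path and all payload values are assumptions for illustration, not part of this commit:

# Sketch, assuming the models land in units/models.py (import path assumed).
from units.models import CreateUnitRequest, ValidationError, ValidationResult

req = CreateUnitRequest(
    unit_id="fractions_intro_v1",  # hypothetical unit
    template="flight_path",
    stops=[{"stop_id": "stop_1", "interaction": {"type": "aim_and_pass"}}],
)
assert req.version == "1.0.0" and req.status == "draft"  # defaults fill the rest

# duration_minutes is constrained (ge=3, le=20); out-of-range input raises,
# which a caller could fold into the ValidationResult shape defined above.
try:
    CreateUnitRequest(unit_id="x", template="flight_path", duration_minutes=25)
except Exception as exc:
    result = ValidationResult(
        valid=False,
        errors=[ValidationError(field="duration_minutes", message=str(exc))],
    )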
@@ -1,494 +1,4 @@
# Backward-compat shim -- module moved to units/routes.py
import importlib as _importlib
import sys as _sys
_sys.modules[__name__] = _importlib.import_module("units.routes")

# ==============================================
# Breakpilot Drive - Unit API Routes
# ==============================================
# Endpoints for listing/getting definitions, sessions, telemetry,
# recommendations, and analytics.
# CRUD definition routes are in unit_definition_routes.py.
# Extracted from unit_api.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends, Request
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
import uuid
import logging

from unit_models import (
    UnitDefinitionResponse,
    CreateSessionRequest,
    SessionResponse,
    TelemetryPayload,
    TelemetryResponse,
    CompleteSessionRequest,
    SessionSummaryResponse,
    UnitListItem,
    RecommendedUnit,
)
from unit_helpers import (
    get_optional_current_user,
    get_unit_database,
    create_session_token,
    get_session_from_token,
    REQUIRE_AUTH,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])


# ==============================================
# Definition List/Get Endpoints
# ==============================================

@router.get("/definitions", response_model=List[UnitListItem])
async def list_unit_definitions(
    template: Optional[str] = Query(None, description="Filter by template: flight_path, station_loop"),
    grade: Optional[str] = Query(None, description="Filter by grade level"),
    locale: str = Query("de-DE", description="Filter by locale"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> List[UnitListItem]:
    """
    List available unit definitions.

    Returns published units matching the filter criteria.
    """
    db = await get_unit_database()
    if db:
        try:
            units = await db.list_units(
                template=template,
                grade=grade,
                locale=locale,
                published_only=True
            )
            return [
                UnitListItem(
                    unit_id=u["unit_id"],
                    template=u["template"],
                    difficulty=u["difficulty"],
                    duration_minutes=u["duration_minutes"],
                    locale=u["locale"],
                    grade_band=u["grade_band"],
                )
                for u in units
            ]
        except Exception as e:
            logger.error(f"Failed to list units: {e}")

    # Fallback: return demo unit
    return [
        UnitListItem(
            unit_id="demo_unit_v1",
            template="flight_path",
            difficulty="base",
            duration_minutes=5,
            locale=["de-DE"],
            grade_band=["5", "6", "7"],
        )
    ]


@router.get("/definitions/{unit_id}", response_model=UnitDefinitionResponse)
async def get_unit_definition(
    unit_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> UnitDefinitionResponse:
    """
    Get a specific unit definition.

    Returns the full unit configuration including stops, interactions, etc.
    """
    db = await get_unit_database()
    if db:
        try:
            unit = await db.get_unit_definition(unit_id)
            if unit:
                return UnitDefinitionResponse(
                    unit_id=unit["unit_id"],
                    template=unit["template"],
                    version=unit["version"],
                    locale=unit["locale"],
                    grade_band=unit["grade_band"],
                    duration_minutes=unit["duration_minutes"],
                    difficulty=unit["difficulty"],
                    definition=unit["definition"],
                )
        except Exception as e:
            logger.error(f"Failed to get unit definition: {e}")

    # Demo unit fallback
    if unit_id == "demo_unit_v1":
        return UnitDefinitionResponse(
            unit_id="demo_unit_v1",
            template="flight_path",
            version="1.0.0",
            locale=["de-DE"],
            grade_band=["5", "6", "7"],
            duration_minutes=5,
            difficulty="base",
            definition={
                "unit_id": "demo_unit_v1",
                "template": "flight_path",
                "version": "1.0.0",
                "learning_objectives": ["Demo: Grundfunktion testen"],
                "stops": [
                    {"stop_id": "stop_1", "label": {"de-DE": "Start"}, "interaction": {"type": "aim_and_pass"}},
                    {"stop_id": "stop_2", "label": {"de-DE": "Mitte"}, "interaction": {"type": "aim_and_pass"}},
                    {"stop_id": "stop_3", "label": {"de-DE": "Ende"}, "interaction": {"type": "aim_and_pass"}},
                ],
                "teacher_controls": {"allow_skip": True, "allow_replay": True},
            },
        )

    raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")


# ==============================================
# Session Endpoints
# ==============================================

@router.post("/sessions", response_model=SessionResponse)
async def create_unit_session(
    request_data: CreateSessionRequest,
    request: Request,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> SessionResponse:
    """
    Create a new unit session.

    - Validates unit exists
    - Creates session record
    - Returns session token for telemetry
    """
    session_id = str(uuid.uuid4())
    expires_at = datetime.utcnow() + timedelta(hours=4)

    # Validate unit exists
    db = await get_unit_database()
    if db:
        try:
            unit = await db.get_unit_definition(request_data.unit_id)
            if not unit:
                raise HTTPException(status_code=404, detail=f"Unit not found: {request_data.unit_id}")

            # Create session in database
            total_stops = len(unit.get("definition", {}).get("stops", []))
            await db.create_session(
                session_id=session_id,
                unit_id=request_data.unit_id,
                student_id=request_data.student_id,
                locale=request_data.locale,
                difficulty=request_data.difficulty,
                total_stops=total_stops,
            )
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"Failed to create session: {e}")
            # Continue with in-memory fallback

    # Create session token
    session_token = create_session_token(session_id, request_data.student_id)

    # Build definition URL
    base_url = str(request.base_url).rstrip("/")
    definition_url = f"{base_url}/api/units/definitions/{request_data.unit_id}"

    return SessionResponse(
        session_id=session_id,
        unit_definition_url=definition_url,
        session_token=session_token,
        telemetry_endpoint="/api/units/telemetry",
        expires_at=expires_at,
    )


@router.post("/telemetry", response_model=TelemetryResponse)
async def receive_telemetry(
    payload: TelemetryPayload,
    request: Request,
) -> TelemetryResponse:
    """
    Receive batched telemetry events from Unity client.

    - Validates session token
    - Stores events in database
    - Returns count of accepted events
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if session_data is None:
        # Allow without auth in dev mode
        if REQUIRE_AUTH:
            raise HTTPException(status_code=401, detail="Invalid or expired session token")
        logger.warning("Telemetry received without valid token (dev mode)")

    # Verify session_id matches
    if session_data and session_data.get("session_id") != payload.session_id:
        raise HTTPException(status_code=403, detail="Session ID mismatch")

    accepted = 0
    db = await get_unit_database()

    for event in payload.events:
        try:
            # Set timestamp if not provided
            timestamp = event.ts or datetime.utcnow().isoformat()

            if db:
                await db.store_telemetry_event(
                    session_id=payload.session_id,
                    event_type=event.type,
                    stop_id=event.stop_id,
                    timestamp=timestamp,
                    metrics=event.metrics,
                )

            accepted += 1
            logger.debug(f"Telemetry: {event.type} for session {payload.session_id}")

        except Exception as e:
            logger.error(f"Failed to store telemetry event: {e}")

    return TelemetryResponse(accepted=accepted)


@router.post("/sessions/{session_id}/complete", response_model=SessionSummaryResponse)
async def complete_session(
    session_id: str,
    request_data: CompleteSessionRequest,
    request: Request,
) -> SessionSummaryResponse:
    """
    Complete a unit session.

    - Processes postcheck answers if provided
    - Calculates learning gain
    - Returns summary and recommendations
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if REQUIRE_AUTH and session_data is None:
        raise HTTPException(status_code=401, detail="Invalid or expired session token")

    db = await get_unit_database()
    summary = {}
    recommendations = {}

    if db:
        try:
            # Get session data
            session = await db.get_session(session_id)
            if not session:
                raise HTTPException(status_code=404, detail="Session not found")

            # Calculate postcheck score if answers provided
            postcheck_score = None
            if request_data.postcheck_answers:
                # Simple scoring: count correct answers
                # In production, would validate against question bank
                postcheck_score = len(request_data.postcheck_answers) * 0.2  # Placeholder
                postcheck_score = min(postcheck_score, 1.0)

            # Complete session in database
            await db.complete_session(
                session_id=session_id,
                postcheck_score=postcheck_score,
            )

            # Get updated session summary
            session = await db.get_session(session_id)

            # Calculate learning gain
            pre_score = session.get("precheck_score")
            post_score = session.get("postcheck_score")
            learning_gain = None
            if pre_score is not None and post_score is not None:
                learning_gain = post_score - pre_score

            summary = {
                "session_id": session_id,
                "unit_id": session.get("unit_id"),
                "duration_seconds": session.get("duration_seconds"),
                "completion_rate": session.get("completion_rate"),
                "precheck_score": pre_score,
                "postcheck_score": post_score,
                "pre_to_post_gain": learning_gain,
                "stops_completed": session.get("stops_completed"),
                "total_stops": session.get("total_stops"),
            }

            # Get recommendations
            recommendations = await db.get_recommendations(
                student_id=session.get("student_id"),
                completed_unit_id=session.get("unit_id"),
            )

        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"Failed to complete session: {e}")
            summary = {"session_id": session_id, "error": str(e)}

    else:
        # Fallback summary
        summary = {
            "session_id": session_id,
            "duration_seconds": 0,
            "completion_rate": 1.0,
            "message": "Database not available",
        }

    return SessionSummaryResponse(
        summary=summary,
        next_recommendations=recommendations or {
            "h5p_activity_ids": [],
            "worksheet_pdf_url": None,
        },
    )


@router.get("/sessions/{session_id}")
async def get_session(
    session_id: str,
    request: Request,
) -> Dict[str, Any]:
    """
    Get session details.

    Returns current state of a session including progress.
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if REQUIRE_AUTH and session_data is None:
        raise HTTPException(status_code=401, detail="Invalid or expired session token")

    db = await get_unit_database()
    if db:
        try:
            session = await db.get_session(session_id)
            if session:
                return session
        except Exception as e:
            logger.error(f"Failed to get session: {e}")

    raise HTTPException(status_code=404, detail="Session not found")


# ==============================================
# Recommendations & Analytics
# ==============================================

@router.get("/recommendations/{student_id}", response_model=List[RecommendedUnit])
async def get_recommendations(
    student_id: str,
    grade: Optional[str] = Query(None, description="Grade level filter"),
    locale: str = Query("de-DE", description="Locale filter"),
    limit: int = Query(5, ge=1, le=20),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> List[RecommendedUnit]:
    """
    Get recommended units for a student.

    Based on completion status and performance.
    """
    db = await get_unit_database()
    if db:
        try:
            recommendations = await db.get_student_recommendations(
                student_id=student_id,
                grade=grade,
                locale=locale,
                limit=limit,
            )
            return [
                RecommendedUnit(
                    unit_id=r["unit_id"],
                    template=r["template"],
                    difficulty=r["difficulty"],
                    reason=r["reason"],
                )
                for r in recommendations
            ]
        except Exception as e:
            logger.error(f"Failed to get recommendations: {e}")

    # Fallback: recommend demo unit
    return [
        RecommendedUnit(
            unit_id="demo_unit_v1",
            template="flight_path",
            difficulty="base",
            reason="Neu: Noch nicht gespielt",
        )
    ]


@router.get("/analytics/student/{student_id}")
async def get_student_analytics(
    student_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Get unit analytics for a student.

    Includes completion rates, learning gains, time spent.
    """
    db = await get_unit_database()
    if db:
        try:
            analytics = await db.get_student_unit_analytics(student_id)
            return analytics
        except Exception as e:
            logger.error(f"Failed to get analytics: {e}")

    return {
        "student_id": student_id,
        "units_attempted": 0,
        "units_completed": 0,
        "avg_completion_rate": 0.0,
        "avg_learning_gain": None,
        "total_minutes": 0,
    }


@router.get("/analytics/unit/{unit_id}")
async def get_unit_analytics(
    unit_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Get analytics for a specific unit.

    Shows aggregate performance across all students.
    """
    db = await get_unit_database()
    if db:
        try:
            analytics = await db.get_unit_performance(unit_id)
            return analytics
        except Exception as e:
            logger.error(f"Failed to get unit analytics: {e}")

    return {
        "unit_id": unit_id,
        "total_sessions": 0,
        "completed_sessions": 0,
        "completion_percent": 0.0,
        "avg_duration_minutes": 0,
        "avg_learning_gain": None,
    }


@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for unit API."""
    db = await get_unit_database()
    db_status = "connected" if db else "disconnected"

    return {
        "status": "healthy",
        "service": "breakpilot-units",
        "database": db_status,
        "auth_required": REQUIRE_AUTH,
    }
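The four-line shim above replaces the whole 494-line module by swapping the old module object in sys.modules for the relocated one at import time. A self-contained sketch of the same trick, with throwaway module names in place of the real files:

# shim_demo.py -- same pattern as the shim above, names are illustrative.
import importlib
import sys
import types

# Stand-in for the relocated module (in the real commit: units/routes.py).
new_mod = types.ModuleType("demo_new_location")
new_mod.router = object()
sys.modules["demo_new_location"] = new_mod

# What the shim does: alias the old import path to the new module object.
sys.modules["demo_old_location"] = importlib.import_module("demo_new_location")

import demo_old_location  # resolved from sys.modules, no file needed

assert demo_old_location.router is new_mod.router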
@@ -0,0 +1 @@
# units — Learning units, analytics, definitions, content generation.
@@ -0,0 +1,25 @@
"""
Breakpilot Drive - Unit Analytics API — Barrel Re-export.

Extended analytics for learning progress:
- Pre/post gain visualization
- Misconception tracking
- Stop-level analytics
- Aggregated class statistics
- Export functions

Split into:
- unit_analytics_models.py: Pydantic models & enums
- unit_analytics_helpers.py: Database access & computation helpers
- unit_analytics_routes.py: Core analytics endpoint handlers
- unit_analytics_export.py: Export & dashboard endpoints
"""

from fastapi import APIRouter

from .analytics_routes import router as _routes_router
from .analytics_export import router as _export_router

router = APIRouter(prefix="/api/analytics", tags=["Unit Analytics"])
router.include_router(_routes_router)
router.include_router(_export_router)
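A sketch of how the combined analytics router would be mounted in the application; the import path is an assumption, not stated in this commit:

from fastapi import FastAPI

from units.analytics import router as analytics_router  # path assumed

app = FastAPI()
app.include_router(analytics_router)
# Everything now hangs off the barrel's prefix, e.g. GET /api/analytics/health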
@@ -0,0 +1,145 @@
"""
Unit Analytics API - Export & Dashboard Routes.

Export endpoints for learning gains and misconceptions, plus dashboard overview.
"""

import logging
from datetime import datetime
from typing import Optional, Dict, Any

from fastapi import APIRouter, Query
from fastapi.responses import Response

from .analytics_models import TimeRange, ExportFormat
from .analytics_helpers import get_analytics_database

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Unit Analytics"])


# ==============================================
# API Endpoints - Export
# ==============================================

@router.get("/export/learning-gains")
async def export_learning_gains(
    unit_id: Optional[str] = Query(None),
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.ALL),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export learning gain data.
    """
    db = await get_analytics_database()
    data = []

    if db:
        try:
            data = await db.export_learning_gains(
                unit_id=unit_id, class_id=class_id, time_range=time_range.value
            )
        except Exception as e:
            logger.error(f"Failed to export data: {e}")

    if format == ExportFormat.CSV:
        csv_content = "student_id,unit_id,precheck,postcheck,gain\n"
        for row in data:
            csv_content += f"{row['student_id']},{row['unit_id']},{row.get('precheck', '')},{row.get('postcheck', '')},{row.get('gain', '')}\n"

        return Response(
            content=csv_content,
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=learning_gains.csv"}
        )

    return {
        "export_date": datetime.utcnow().isoformat(),
        "filters": {
            "unit_id": unit_id, "class_id": class_id, "time_range": time_range.value,
        },
        "data": data,
    }


@router.get("/export/misconceptions")
async def export_misconceptions(
    class_id: Optional[str] = Query(None),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export misconception data for further analysis.
    """
    # Import here to avoid circular dependency
    from .analytics_routes import get_misconception_report

    report = await get_misconception_report(
        class_id=class_id, unit_id=None,
        time_range=TimeRange.MONTH, limit=100
    )

    if format == ExportFormat.CSV:
        csv_content = "concept_id,concept_label,misconception,frequency,unit_id,stop_id\n"
        for m in report.most_common:
            csv_content += f'"{m.concept_id}","{m.concept_label}","{m.misconception_text}",{m.frequency},"{m.unit_id}","{m.stop_id}"\n'

        return Response(
            content=csv_content,
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=misconceptions.csv"}
        )

    return {
        "export_date": datetime.utcnow().isoformat(),
        "class_id": class_id,
        "total_entries": len(report.most_common),
        "data": [m.model_dump() for m in report.most_common],
    }


# ==============================================
# API Endpoints - Dashboard Aggregates
# ==============================================

@router.get("/dashboard/overview")
async def get_analytics_overview(
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> Dict[str, Any]:
    """
    Get high-level analytics overview for dashboard.
    """
    db = await get_analytics_database()

    if db:
        try:
            overview = await db.get_analytics_overview(time_range.value)
            return overview
        except Exception as e:
            logger.error(f"Failed to get analytics overview: {e}")

    return {
        "time_range": time_range.value,
        "total_sessions": 0,
        "unique_students": 0,
        "avg_completion_rate": 0.0,
        "avg_learning_gain": 0.0,
        "most_played_units": [],
        "struggling_concepts": [],
        "active_classes": 0,
    }


@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for analytics API."""
    db = await get_analytics_database()
    return {
        "status": "healthy",
        "service": "unit-analytics",
        "database": "connected" if db else "disconnected",
    }
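A client-side sketch for the CSV export endpoint above; host, port, and output path are assumptions:

import httpx

resp = httpx.get(
    "http://localhost:8000/api/analytics/export/learning-gains",  # host assumed
    params={"format": "csv", "time_range": "month"},
)
resp.raise_for_status()
assert resp.headers["content-type"].startswith("text/csv")
with open("learning_gains.csv", "wb") as f:
    f.write(resp.content)  # header row plus one line per student/unit pair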
@@ -0,0 +1,97 @@
"""
Unit Analytics API - Helpers.

Database access, statistical computation, and utility functions.
"""

import os
import logging
from typing import List, Dict, Optional

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"

# Database singleton
_analytics_db = None


async def get_analytics_database():
    """Get analytics database instance."""
    global _analytics_db
    if not USE_DATABASE:
        return None
    if _analytics_db is None:
        try:
            from unit.database import get_analytics_db
            _analytics_db = await get_analytics_db()
            logger.info("Analytics database initialized")
        except ImportError:
            logger.warning("Analytics database module not available")
        except Exception as e:
            logger.warning(f"Analytics database not available: {e}")
    return _analytics_db


def calculate_gain_distribution(gains: List[float]) -> Dict[str, int]:
    """Calculate distribution of learning gains into buckets."""
    distribution = {
        "< -20%": 0,
        "-20% to -10%": 0,
        "-10% to 0%": 0,
        "0% to 10%": 0,
        "10% to 20%": 0,
        "> 20%": 0,
    }

    for gain in gains:
        gain_percent = gain * 100
        if gain_percent < -20:
            distribution["< -20%"] += 1
        elif gain_percent < -10:
            distribution["-20% to -10%"] += 1
        elif gain_percent < 0:
            distribution["-10% to 0%"] += 1
        elif gain_percent < 10:
            distribution["0% to 10%"] += 1
        elif gain_percent < 20:
            distribution["10% to 20%"] += 1
        else:
            distribution["> 20%"] += 1

    return distribution


def calculate_trend(scores: List[float]) -> str:
    """Calculate trend from a series of scores."""
    if len(scores) < 3:
        return "insufficient_data"

    # Simple linear regression
    n = len(scores)
    x_mean = (n - 1) / 2
    y_mean = sum(scores) / n

    numerator = sum((i - x_mean) * (scores[i] - y_mean) for i in range(n))
    denominator = sum((i - x_mean) ** 2 for i in range(n))

    if denominator == 0:
        return "stable"

    slope = numerator / denominator

    if slope > 0.05:
        return "improving"
    elif slope < -0.05:
        return "declining"
    else:
        return "stable"


def calculate_difficulty_rating(success_rate: float, avg_attempts: float) -> float:
    """Calculate difficulty rating 1-5 based on success metrics."""
    # Lower success rate and higher attempts = higher difficulty
    base_difficulty = (1 - success_rate) * 3 + 1  # 1-4 range
    attempt_modifier = min(avg_attempts - 1, 1)  # 0-1 range
    return min(5.0, base_difficulty + attempt_modifier)
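The three helpers above are pure functions, so they can be checked in isolation; a worked example (import path assumed):

from units.analytics_helpers import (  # import path assumed
    calculate_difficulty_rating,
    calculate_gain_distribution,
    calculate_trend,
)

gains = [-0.25, -0.05, 0.02, 0.08, 0.15, 0.30]
print(calculate_gain_distribution(gains))
# {'< -20%': 1, '-20% to -10%': 0, '-10% to 0%': 1,
#  '0% to 10%': 2, '10% to 20%': 1, '> 20%': 1}

# Regression slope over [0.4, 0.5, 0.6, 0.7] is 0.1 > 0.05:
print(calculate_trend([0.4, 0.5, 0.6, 0.7]))  # "improving"

# (1 - 0.5) * 3 + 1 = 2.5 base, plus min(2.0 - 1, 1) = 1.0 modifier:
print(calculate_difficulty_rating(0.5, 2.0))  # 3.5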
@@ -0,0 +1,127 @@
"""
Unit Analytics API - Pydantic Models.

Data models for learning gains, stop performance, misconceptions,
student progress, class comparison, and export.
"""

from typing import List, Optional, Dict, Any
from datetime import datetime
from enum import Enum

from pydantic import BaseModel, Field


class TimeRange(str, Enum):
    """Time range for analytics queries"""
    WEEK = "week"
    MONTH = "month"
    QUARTER = "quarter"
    ALL = "all"


class LearningGainData(BaseModel):
    """Pre/Post learning gain data point"""
    student_id: str
    student_name: str
    unit_id: str
    precheck_score: float
    postcheck_score: float
    learning_gain: float
    percentile: Optional[float] = None


class LearningGainSummary(BaseModel):
    """Aggregated learning gain statistics"""
    unit_id: str
    unit_title: str
    total_students: int
    avg_precheck: float
    avg_postcheck: float
    avg_gain: float
    median_gain: float
    std_deviation: float
    positive_gain_count: int
    negative_gain_count: int
    no_change_count: int
    gain_distribution: Dict[str, int]
    individual_gains: List[LearningGainData]


class StopPerformance(BaseModel):
    """Performance data for a single stop"""
    stop_id: str
    stop_label: str
    attempts_total: int
    success_rate: float
    avg_time_seconds: float
    avg_attempts_before_success: float
    common_errors: List[str]
    difficulty_rating: float  # 1-5 based on performance


class UnitPerformanceDetail(BaseModel):
    """Detailed unit performance breakdown"""
    unit_id: str
    unit_title: str
    template: str
    total_sessions: int
    completed_sessions: int
    completion_rate: float
    avg_duration_minutes: float
    stops: List[StopPerformance]
    bottleneck_stops: List[str]  # Stops where students struggle most


class MisconceptionEntry(BaseModel):
    """Individual misconception tracking"""
    concept_id: str
    concept_label: str
    misconception_text: str
    frequency: int
    affected_student_ids: List[str]
    unit_id: str
    stop_id: str
    detected_via: str  # "precheck", "postcheck", "interaction"
    first_detected: datetime
    last_detected: datetime


class MisconceptionReport(BaseModel):
    """Comprehensive misconception report"""
    class_id: Optional[str]
    time_range: str
    total_misconceptions: int
    unique_concepts: int
    most_common: List[MisconceptionEntry]
    by_unit: Dict[str, List[MisconceptionEntry]]
    trending_up: List[MisconceptionEntry]  # Getting more frequent
    resolved: List[MisconceptionEntry]  # No longer appearing


class StudentProgressTimeline(BaseModel):
    """Timeline of student progress"""
    student_id: str
    student_name: str
    units_completed: int
    total_time_minutes: int
    avg_score: float
    trend: str  # "improving", "stable", "declining"
    timeline: List[Dict[str, Any]]  # List of session events


class ClassComparisonData(BaseModel):
    """Data for comparing class performance"""
    class_id: str
    class_name: str
    student_count: int
    units_assigned: int
    avg_completion_rate: float
    avg_learning_gain: float
    avg_time_per_unit: float


class ExportFormat(str, Enum):
    """Export format options"""
    JSON = "json"
    CSV = "csv"
@@ -0,0 +1,394 @@
"""
Unit Analytics API - Routes.

All API endpoints for learning gain, stop-level, misconception,
student timeline, class comparison, export, and dashboard analytics.
"""

import logging
import statistics
from datetime import datetime
from typing import Optional, Dict, Any, List

from fastapi import APIRouter, Query

from .analytics_models import (
    TimeRange,
    LearningGainData,
    LearningGainSummary,
    StopPerformance,
    UnitPerformanceDetail,
    MisconceptionEntry,
    MisconceptionReport,
    StudentProgressTimeline,
    ClassComparisonData,
)
from .analytics_helpers import (
    get_analytics_database,
    calculate_gain_distribution,
    calculate_trend,
    calculate_difficulty_rating,
)

logger = logging.getLogger(__name__)

router = APIRouter(tags=["Unit Analytics"])


# ==============================================
# API Endpoints - Learning Gain
# ==============================================

# NOTE: Static routes must come BEFORE dynamic routes like /{unit_id}
@router.get("/learning-gain/compare")
async def compare_learning_gains(
    unit_ids: str = Query(..., description="Comma-separated unit IDs"),
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> Dict[str, Any]:
    """
    Compare learning gains across multiple units.
    """
    unit_list = [u.strip() for u in unit_ids.split(",")]
    comparisons = []

    for unit_id in unit_list:
        try:
            summary = await get_learning_gain_analysis(unit_id, class_id, time_range)
            comparisons.append({
                "unit_id": unit_id,
                "avg_gain": summary.avg_gain,
                "median_gain": summary.median_gain,
                "total_students": summary.total_students,
                "positive_rate": summary.positive_gain_count / max(summary.total_students, 1),
            })
        except Exception as e:
            logger.error(f"Failed to get comparison for {unit_id}: {e}")

    return {
        "time_range": time_range.value,
        "class_id": class_id,
        "comparisons": sorted(comparisons, key=lambda x: x["avg_gain"], reverse=True),
    }


@router.get("/learning-gain/{unit_id}", response_model=LearningGainSummary)
async def get_learning_gain_analysis(
    unit_id: str,
    class_id: Optional[str] = Query(None, description="Filter by class"),
    time_range: TimeRange = Query(TimeRange.MONTH, description="Time range for analysis"),
) -> LearningGainSummary:
    """
    Get detailed pre/post learning gain analysis for a unit.
    """
    db = await get_analytics_database()
    individual_gains = []

    if db:
        try:
            sessions = await db.get_unit_sessions_with_scores(
                unit_id=unit_id,
                class_id=class_id,
                time_range=time_range.value
            )

            for session in sessions:
                if session.get("precheck_score") is not None and session.get("postcheck_score") is not None:
                    gain = session["postcheck_score"] - session["precheck_score"]
                    individual_gains.append(LearningGainData(
                        student_id=session["student_id"],
                        student_name=session.get("student_name", session["student_id"][:8]),
                        unit_id=unit_id,
                        precheck_score=session["precheck_score"],
                        postcheck_score=session["postcheck_score"],
                        learning_gain=gain,
                    ))
        except Exception as e:
            logger.error(f"Failed to get learning gain data: {e}")

    # Calculate statistics
    if not individual_gains:
        return LearningGainSummary(
            unit_id=unit_id,
            unit_title=f"Unit {unit_id}",
            total_students=0,
            avg_precheck=0.0, avg_postcheck=0.0,
            avg_gain=0.0, median_gain=0.0, std_deviation=0.0,
            positive_gain_count=0, negative_gain_count=0, no_change_count=0,
            gain_distribution={}, individual_gains=[],
        )

    gains = [g.learning_gain for g in individual_gains]
    prechecks = [g.precheck_score for g in individual_gains]
    postchecks = [g.postcheck_score for g in individual_gains]

    avg_gain = statistics.mean(gains)
    median_gain = statistics.median(gains)
    std_dev = statistics.stdev(gains) if len(gains) > 1 else 0.0

    # Calculate percentiles
    sorted_gains = sorted(gains)
    for data in individual_gains:
        rank = sorted_gains.index(data.learning_gain) + 1
        data.percentile = rank / len(sorted_gains) * 100

    return LearningGainSummary(
        unit_id=unit_id,
        unit_title=f"Unit {unit_id}",
        total_students=len(individual_gains),
        avg_precheck=statistics.mean(prechecks),
        avg_postcheck=statistics.mean(postchecks),
        avg_gain=avg_gain,
        median_gain=median_gain,
        std_deviation=std_dev,
        positive_gain_count=sum(1 for g in gains if g > 0.01),
        negative_gain_count=sum(1 for g in gains if g < -0.01),
        no_change_count=sum(1 for g in gains if -0.01 <= g <= 0.01),
        gain_distribution=calculate_gain_distribution(gains),
        individual_gains=sorted(individual_gains, key=lambda x: x.learning_gain, reverse=True),
    )


# ==============================================
# API Endpoints - Stop-Level Analytics
# ==============================================

@router.get("/unit/{unit_id}/stops", response_model=UnitPerformanceDetail)
async def get_unit_stop_analytics(
    unit_id: str,
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> UnitPerformanceDetail:
    """
    Get detailed stop-level performance analytics.
    """
    db = await get_analytics_database()
    stops_data = []

    if db:
        try:
            stop_stats = await db.get_stop_performance(
                unit_id=unit_id, class_id=class_id, time_range=time_range.value
            )

            for stop in stop_stats:
                difficulty = calculate_difficulty_rating(
                    stop.get("success_rate", 0.5),
                    stop.get("avg_attempts", 1.0)
                )
                stops_data.append(StopPerformance(
                    stop_id=stop["stop_id"],
                    stop_label=stop.get("stop_label", stop["stop_id"]),
                    attempts_total=stop.get("total_attempts", 0),
                    success_rate=stop.get("success_rate", 0.0),
                    avg_time_seconds=stop.get("avg_time_seconds", 0.0),
                    avg_attempts_before_success=stop.get("avg_attempts", 1.0),
                    common_errors=stop.get("common_errors", []),
                    difficulty_rating=difficulty,
                ))

            unit_stats = await db.get_unit_overall_stats(unit_id, class_id, time_range.value)
        except Exception as e:
            logger.error(f"Failed to get stop analytics: {e}")
            unit_stats = {}
    else:
        unit_stats = {}

    # Identify bottleneck stops
    bottlenecks = [
        s.stop_id for s in stops_data
        if s.difficulty_rating > 3.5 or s.success_rate < 0.6
    ]

    return UnitPerformanceDetail(
        unit_id=unit_id,
        unit_title=f"Unit {unit_id}",
        template=unit_stats.get("template", "unknown"),
        total_sessions=unit_stats.get("total_sessions", 0),
        completed_sessions=unit_stats.get("completed_sessions", 0),
        completion_rate=unit_stats.get("completion_rate", 0.0),
        avg_duration_minutes=unit_stats.get("avg_duration_minutes", 0.0),
        stops=stops_data,
        bottleneck_stops=bottlenecks,
    )


# ==============================================
# API Endpoints - Misconception Tracking
# ==============================================

@router.get("/misconceptions", response_model=MisconceptionReport)
async def get_misconception_report(
    class_id: Optional[str] = Query(None),
    unit_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.MONTH),
    limit: int = Query(20, ge=1, le=100),
) -> MisconceptionReport:
    """
    Get comprehensive misconception report.
    """
    db = await get_analytics_database()
    misconceptions = []

    if db:
        try:
            raw_misconceptions = await db.get_misconceptions(
                class_id=class_id, unit_id=unit_id,
                time_range=time_range.value, limit=limit
            )

            for m in raw_misconceptions:
                misconceptions.append(MisconceptionEntry(
                    concept_id=m["concept_id"],
                    concept_label=m["concept_label"],
                    misconception_text=m["misconception_text"],
                    frequency=m["frequency"],
                    affected_student_ids=m.get("student_ids", []),
                    unit_id=m["unit_id"],
                    stop_id=m["stop_id"],
                    detected_via=m.get("detected_via", "unknown"),
                    first_detected=m.get("first_detected", datetime.utcnow()),
                    last_detected=m.get("last_detected", datetime.utcnow()),
                ))
        except Exception as e:
            logger.error(f"Failed to get misconceptions: {e}")

    # Group by unit
    by_unit = {}
    for m in misconceptions:
        if m.unit_id not in by_unit:
            by_unit[m.unit_id] = []
        by_unit[m.unit_id].append(m)

    trending_up = misconceptions[:3] if misconceptions else []
    resolved = []

    return MisconceptionReport(
        class_id=class_id,
        time_range=time_range.value,
        total_misconceptions=sum(m.frequency for m in misconceptions),
        unique_concepts=len(set(m.concept_id for m in misconceptions)),
        most_common=sorted(misconceptions, key=lambda x: x.frequency, reverse=True)[:10],
        by_unit=by_unit,
        trending_up=trending_up,
        resolved=resolved,
    )


@router.get("/misconceptions/student/{student_id}")
async def get_student_misconceptions(
    student_id: str,
    time_range: TimeRange = Query(TimeRange.ALL),
) -> Dict[str, Any]:
    """
    Get misconceptions for a specific student.
    """
    db = await get_analytics_database()

    if db:
        try:
            misconceptions = await db.get_student_misconceptions(
                student_id=student_id, time_range=time_range.value
            )
            return {
                "student_id": student_id,
                "misconceptions": misconceptions,
                "recommended_remediation": [
                    {"concept": m["concept_label"], "activity": f"Review {m['unit_id']}/{m['stop_id']}"}
                    for m in misconceptions[:5]
                ]
            }
        except Exception as e:
            logger.error(f"Failed to get student misconceptions: {e}")

    return {
        "student_id": student_id,
        "misconceptions": [],
        "recommended_remediation": [],
    }


# ==============================================
# API Endpoints - Student Progress Timeline
# ==============================================

@router.get("/student/{student_id}/timeline", response_model=StudentProgressTimeline)
async def get_student_timeline(
    student_id: str,
    time_range: TimeRange = Query(TimeRange.ALL),
) -> StudentProgressTimeline:
    """
    Get detailed progress timeline for a student.
    """
    db = await get_analytics_database()
    timeline = []
    scores = []

    if db:
        try:
            sessions = await db.get_student_sessions(
                student_id=student_id, time_range=time_range.value
            )

            for session in sessions:
                timeline.append({
                    "date": session.get("started_at"),
                    "unit_id": session.get("unit_id"),
                    "completed": session.get("completed_at") is not None,
                    "precheck": session.get("precheck_score"),
                    "postcheck": session.get("postcheck_score"),
                    "duration_minutes": session.get("duration_seconds", 0) // 60,
                })
                if session.get("postcheck_score") is not None:
                    scores.append(session["postcheck_score"])
        except Exception as e:
            logger.error(f"Failed to get student timeline: {e}")

    trend = calculate_trend(scores) if scores else "insufficient_data"

    return StudentProgressTimeline(
        student_id=student_id,
        student_name=f"Student {student_id[:8]}",
        units_completed=sum(1 for t in timeline if t["completed"]),
        total_time_minutes=sum(t["duration_minutes"] for t in timeline),
        avg_score=statistics.mean(scores) if scores else 0.0,
        trend=trend,
        timeline=timeline,
    )


# ==============================================
# API Endpoints - Class Comparison
# ==============================================

@router.get("/compare/classes", response_model=List[ClassComparisonData])
async def compare_classes(
    class_ids: str = Query(..., description="Comma-separated class IDs"),
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> List[ClassComparisonData]:
    """
    Compare performance across multiple classes.
    """
    class_list = [c.strip() for c in class_ids.split(",")]
    comparisons = []

    db = await get_analytics_database()
    if db:
        for class_id in class_list:
            try:
                stats = await db.get_class_aggregate_stats(class_id, time_range.value)
                comparisons.append(ClassComparisonData(
                    class_id=class_id,
                    class_name=stats.get("class_name", f"Klasse {class_id[:8]}"),
                    student_count=stats.get("student_count", 0),
                    units_assigned=stats.get("units_assigned", 0),
                    avg_completion_rate=stats.get("avg_completion_rate", 0.0),
                    avg_learning_gain=stats.get("avg_learning_gain", 0.0),
                    avg_time_per_unit=stats.get("avg_time_per_unit", 0.0),
                ))
            except Exception as e:
                logger.error(f"Failed to get stats for class {class_id}: {e}")

    return sorted(comparisons, key=lambda x: x.avg_learning_gain, reverse=True)
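The NOTE in the routes above about registering static routes before dynamic ones is load-bearing: FastAPI matches routes in registration order. A toy app demonstrates what the ordering buys:

from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()

@app.get("/learning-gain/compare")    # static route registered first
def compare():
    return {"route": "compare"}

@app.get("/learning-gain/{unit_id}")  # dynamic route registered second
def by_unit(unit_id: str):
    return {"route": "unit", "unit_id": unit_id}

client = TestClient(app)
# With the order flipped, /learning-gain/compare would match the dynamic
# route and arrive as unit_id == "compare".
assert client.get("/learning-gain/compare").json() == {"route": "compare"}
assert client.get("/learning-gain/u1").json()["unit_id"] == "u1"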
@@ -0,0 +1,57 @@
# ==============================================
# Breakpilot Drive - Unit API (barrel re-export)
# ==============================================
# This module was split into:
# - unit_models.py (Pydantic models)
# - unit_helpers.py (Auth, DB, token, validation helpers)
# - unit_routes.py (Definition, session, analytics routes)
# - unit_content_routes.py (H5P, worksheet, PDF routes)
#
# The `router` object is assembled here by including all sub-routers.
# Importers that did `from unit_api import router` continue to work.

from fastapi import APIRouter

from .routes import router as _routes_router
from .definition_routes import router as _definition_router
from .content_routes import router as _content_router

# Re-export models for any direct importers
from .models import (  # noqa: F401
    UnitDefinitionResponse,
    CreateSessionRequest,
    SessionResponse,
    TelemetryEvent,
    TelemetryPayload,
    TelemetryResponse,
    PostcheckAnswer,
    CompleteSessionRequest,
    SessionSummaryResponse,
    UnitListItem,
    RecommendedUnit,
    CreateUnitRequest,
    UpdateUnitRequest,
    ValidationError,
    ValidationResult,
)

# Re-export helpers for any direct importers
from .helpers import (  # noqa: F401
    get_optional_current_user,
    get_unit_database,
    create_session_token,
    verify_session_token,
    get_session_from_token,
    validate_unit_definition,
    USE_DATABASE,
    REQUIRE_AUTH,
    SECRET_KEY,
)

# Assemble the combined router.
# _routes_router and _content_router both use prefix="/api/units",
# so we create a plain router and include them without extra prefix.
router = APIRouter()
router.include_router(_routes_router)
router.include_router(_definition_router)
router.include_router(_content_router)
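If unit_api.py received the same importlib shim as unit_routes.py (an assumption; only the comment above implies it), the old flat path and the new package path resolve to the same router:

import unit_api          # old flat path, assumed shimmed like unit_routes
from units import api    # new package module

assert unit_api.router is api.router  # same APIRouter object either way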
@@ -0,0 +1,160 @@
|
|||||||
|
# ==============================================
|
||||||
|
# Breakpilot Drive - Unit Content Generation Routes
|
||||||
|
# ==============================================
|
||||||
|
# API endpoints for H5P content, worksheets, and PDF generation.
|
||||||
|
# Extracted from unit_api.py for file-size compliance.
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, Query, Depends
|
||||||
|
from typing import Optional, Dict, Any
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .models import UnitDefinitionResponse
|
||||||
|
from .helpers import get_optional_current_user, get_unit_database
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/content/{unit_id}/h5p")
|
||||||
|
async def generate_h5p_content(
|
||||||
|
unit_id: str,
|
||||||
|
locale: str = Query("de-DE", description="Target locale"),
|
||||||
|
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Generate H5P content items for a unit.
|
||||||
|
|
||||||
|
Returns H5P-compatible content structures for:
|
||||||
|
- Drag and Drop (vocabulary matching)
|
||||||
|
- Fill in the Blanks (concept texts)
|
||||||
|
- Multiple Choice (misconception targeting)
|
||||||
|
"""
|
||||||
|
from content_generators import generate_h5p_for_unit, H5PGenerator, generate_h5p_manifest
|
||||||
|
|
||||||
|
# Get unit definition
|
||||||
|
db = await get_unit_database()
|
||||||
|
unit_def = None
|
||||||
|
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
unit = await db.get_unit_definition(unit_id)
|
||||||
|
            if unit:
                unit_def = unit.get("definition", {})
        except Exception as e:
            logger.error(f"Failed to get unit for H5P generation: {e}")

    if not unit_def:
        raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")

    try:
        generator = H5PGenerator(locale=locale)
        contents = generator.generate_from_unit(unit_def)
        manifest = generate_h5p_manifest(contents, unit_id)

        return {
            "unit_id": unit_id,
            "locale": locale,
            "generated_count": len(contents),
            "manifest": manifest,
            "contents": [c.to_h5p_structure() for c in contents]
        }
    except Exception as e:
        logger.error(f"H5P generation failed: {e}")
        raise HTTPException(status_code=500, detail=f"H5P generation failed: {str(e)}")


@router.get("/content/{unit_id}/worksheet")
async def generate_worksheet_html(
    unit_id: str,
    locale: str = Query("de-DE", description="Target locale"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Generate worksheet HTML for a unit.

    Returns HTML that can be:
    - Displayed in browser
    - Converted to PDF using weasyprint
    - Printed directly
    """
    from content_generators import PDFGenerator

    # Get unit definition
    db = await get_unit_database()
    unit_def = None

    if db:
        try:
            unit = await db.get_unit_definition(unit_id)
            if unit:
                unit_def = unit.get("definition", {})
        except Exception as e:
            logger.error(f"Failed to get unit for worksheet generation: {e}")

    if not unit_def:
        raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")

    try:
        generator = PDFGenerator(locale=locale)
        worksheet = generator.generate_from_unit(unit_def)

        return {
            "unit_id": unit_id,
            "locale": locale,
            "title": worksheet.title,
            "sections": len(worksheet.sections),
            "html": worksheet.to_html()
        }
    except Exception as e:
        logger.error(f"Worksheet generation failed: {e}")
        raise HTTPException(status_code=500, detail=f"Worksheet generation failed: {str(e)}")


@router.get("/content/{unit_id}/worksheet.pdf")
async def download_worksheet_pdf(
    unit_id: str,
    locale: str = Query("de-DE", description="Target locale"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
):
    """
    Generate and download worksheet as PDF.

    Requires weasyprint to be installed on the server.
    """
    from fastapi.responses import Response

    # Get unit definition
    db = await get_unit_database()
    unit_def = None

    if db:
        try:
            unit = await db.get_unit_definition(unit_id)
            if unit:
                unit_def = unit.get("definition", {})
        except Exception as e:
            logger.error(f"Failed to get unit for PDF generation: {e}")

    if not unit_def:
        raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")

    try:
        from content_generators import generate_worksheet_pdf
        pdf_bytes = generate_worksheet_pdf(unit_def, locale)

        return Response(
            content=pdf_bytes,
            media_type="application/pdf",
            headers={
                "Content-Disposition": f'attachment; filename="{unit_id}_worksheet.pdf"'
            }
        )
    except ImportError:
        raise HTTPException(
            status_code=501,
            detail="PDF generation not available. Install weasyprint: pip install weasyprint"
        )
    except Exception as e:
        logger.error(f"PDF generation failed: {e}")
        raise HTTPException(status_code=500, detail=f"PDF generation failed: {str(e)}")
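
# The three endpoints above delegate rendering to content_generators, which is
# not part of this diff. A minimal sketch of the weasyprint call the PDF path
# presumably wraps (an assumption about the internals, not the actual helper):
from weasyprint import HTML

def render_pdf_sketch(worksheet_html: str) -> bytes:
    # write_pdf() with no target returns the rendered PDF as bytes
    return HTML(string=worksheet_html).write_pdf()
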
@@ -0,0 +1,301 @@
# ==============================================
# Breakpilot Drive - Unit Definition CRUD Routes
# ==============================================
# Endpoints for creating, updating, deleting, and validating
# unit definitions. Extracted from unit_routes.py for file-size compliance.

from fastapi import APIRouter, HTTPException, Query, Depends
from typing import Optional, Dict, Any
from datetime import datetime
import logging

from .models import (
    UnitDefinitionResponse,
    CreateUnitRequest,
    UpdateUnitRequest,
    ValidationResult,
)
from .helpers import (
    get_optional_current_user,
    get_unit_database,
    validate_unit_definition,
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])


@router.post("/definitions", response_model=UnitDefinitionResponse)
async def create_unit_definition(
    request_data: CreateUnitRequest,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> UnitDefinitionResponse:
    """
    Create a new unit definition.

    - Validates unit structure
    - Saves to database or JSON file
    - Returns created unit
    """
    import json
    from pathlib import Path

    # Build full definition
    definition = {
        "unit_id": request_data.unit_id,
        "template": request_data.template,
        "version": request_data.version,
        "locale": request_data.locale,
        "grade_band": request_data.grade_band,
        "duration_minutes": request_data.duration_minutes,
        "difficulty": request_data.difficulty,
        "subject": request_data.subject,
        "topic": request_data.topic,
        "learning_objectives": request_data.learning_objectives,
        "stops": request_data.stops,
        "precheck": request_data.precheck or {
            "question_set_id": f"{request_data.unit_id}_precheck",
            "required": True,
            "time_limit_seconds": 120
        },
        "postcheck": request_data.postcheck or {
            "question_set_id": f"{request_data.unit_id}_postcheck",
            "required": True,
            "time_limit_seconds": 180
        },
        "teacher_controls": request_data.teacher_controls or {
            "allow_skip": True,
            "allow_replay": True,
            "max_time_per_stop_sec": 90,
            "show_hints": True,
            "require_precheck": True,
            "require_postcheck": True
        },
        "assets": request_data.assets or {},
        "metadata": request_data.metadata or {
            "author": user.get("email", "Unknown") if user else "Unknown",
            "created": datetime.utcnow().isoformat(),
            "curriculum_reference": ""
        }
    }

    # Validate
    validation = validate_unit_definition(definition)
    if not validation.valid:
        error_msgs = [f"{e.field}: {e.message}" for e in validation.errors]
        raise HTTPException(status_code=400, detail=f"Validierung fehlgeschlagen: {'; '.join(error_msgs)}")

    # Check if unit_id already exists
    db = await get_unit_database()
    if db:
        try:
            existing = await db.get_unit_definition(request_data.unit_id)
            if existing:
                raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")

            # Save to database
            await db.create_unit_definition(
                unit_id=request_data.unit_id,
                template=request_data.template,
                version=request_data.version,
                locale=request_data.locale,
                grade_band=request_data.grade_band,
                duration_minutes=request_data.duration_minutes,
                difficulty=request_data.difficulty,
                definition=definition,
                status=request_data.status
            )
            logger.info(f"Unit created in database: {request_data.unit_id}")
        except HTTPException:
            raise
        except Exception as e:
            logger.warning(f"Database save failed, using JSON fallback: {e}")
            # Fallback to JSON
            units_dir = Path(__file__).parent / "data" / "units"
            units_dir.mkdir(parents=True, exist_ok=True)
            json_path = units_dir / f"{request_data.unit_id}.json"
            if json_path.exists():
                raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")
            with open(json_path, "w", encoding="utf-8") as f:
                json.dump(definition, f, ensure_ascii=False, indent=2)
            logger.info(f"Unit created as JSON: {json_path}")
    else:
        # JSON only mode
        units_dir = Path(__file__).parent / "data" / "units"
        units_dir.mkdir(parents=True, exist_ok=True)
        json_path = units_dir / f"{request_data.unit_id}.json"
        if json_path.exists():
            raise HTTPException(status_code=409, detail=f"Unit existiert bereits: {request_data.unit_id}")
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(definition, f, ensure_ascii=False, indent=2)
        logger.info(f"Unit created as JSON: {json_path}")

    return UnitDefinitionResponse(
        unit_id=request_data.unit_id,
        template=request_data.template,
        version=request_data.version,
        locale=request_data.locale,
        grade_band=request_data.grade_band,
        duration_minutes=request_data.duration_minutes,
        difficulty=request_data.difficulty,
        definition=definition
    )
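
# For orientation, a minimal request body for POST /api/units/definitions
# (illustrative values only; omitted optional fields fall back to the
# defaults assembled in create_unit_definition above):
EXAMPLE_CREATE_UNIT_BODY = {
    "unit_id": "fractions_intro_v1",  # hypothetical id
    "template": "flight_path",
    "stops": [
        {"stop_id": "stop_1", "label": {"de-DE": "Start"}, "interaction": {"type": "aim_and_pass"}},
        {"stop_id": "stop_2", "label": {"de-DE": "Mitte"}, "interaction": {"type": "slider_adjust"}},
        {"stop_id": "stop_3", "label": {"de-DE": "Ende"}, "interaction": {"type": "drag_match"}},
    ],
}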


@router.put("/definitions/{unit_id}", response_model=UnitDefinitionResponse)
async def update_unit_definition(
    unit_id: str,
    request_data: UpdateUnitRequest,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> UnitDefinitionResponse:
    """
    Update an existing unit definition.

    - Merges updates with existing definition
    - Re-validates
    - Saves updated version
    """
    import json
    from pathlib import Path

    # Get existing unit
    db = await get_unit_database()
    existing = None

    if db:
        try:
            existing = await db.get_unit_definition(unit_id)
        except Exception as e:
            logger.warning(f"Database read failed: {e}")

    if not existing:
        # Try JSON file
        json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
        if json_path.exists():
            with open(json_path, "r", encoding="utf-8") as f:
                file_data = json.load(f)
            existing = {
                "unit_id": file_data.get("unit_id"),
                "template": file_data.get("template"),
                "version": file_data.get("version", "1.0.0"),
                "locale": file_data.get("locale", ["de-DE"]),
                "grade_band": file_data.get("grade_band", []),
                "duration_minutes": file_data.get("duration_minutes", 8),
                "difficulty": file_data.get("difficulty", "base"),
                "definition": file_data
            }

    if not existing:
        raise HTTPException(status_code=404, detail=f"Unit nicht gefunden: {unit_id}")

    # Merge updates into existing definition
    definition = existing.get("definition", {})
    update_dict = request_data.model_dump(exclude_unset=True)

    for key, value in update_dict.items():
        if value is not None:
            definition[key] = value

    # Validate updated definition
    validation = validate_unit_definition(definition)
    if not validation.valid:
        error_msgs = [f"{e.field}: {e.message}" for e in validation.errors]
        raise HTTPException(status_code=400, detail=f"Validierung fehlgeschlagen: {'; '.join(error_msgs)}")

    # Save
    if db:
        try:
            await db.update_unit_definition(
                unit_id=unit_id,
                version=definition.get("version"),
                locale=definition.get("locale"),
                grade_band=definition.get("grade_band"),
                duration_minutes=definition.get("duration_minutes"),
                difficulty=definition.get("difficulty"),
                definition=definition,
                status=update_dict.get("status")
            )
            logger.info(f"Unit updated in database: {unit_id}")
        except Exception as e:
            logger.warning(f"Database update failed, using JSON: {e}")
            json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
            with open(json_path, "w", encoding="utf-8") as f:
                json.dump(definition, f, ensure_ascii=False, indent=2)
    else:
        json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
        with open(json_path, "w", encoding="utf-8") as f:
            json.dump(definition, f, ensure_ascii=False, indent=2)
        logger.info(f"Unit updated as JSON: {json_path}")

    return UnitDefinitionResponse(
        unit_id=unit_id,
        template=definition.get("template", existing.get("template")),
        version=definition.get("version", existing.get("version", "1.0.0")),
        locale=definition.get("locale", existing.get("locale", ["de-DE"])),
        grade_band=definition.get("grade_band", existing.get("grade_band", [])),
        duration_minutes=definition.get("duration_minutes", existing.get("duration_minutes", 8)),
        difficulty=definition.get("difficulty", existing.get("difficulty", "base")),
        definition=definition
    )


@router.delete("/definitions/{unit_id}")
async def delete_unit_definition(
    unit_id: str,
    force: bool = Query(False, description="Force delete even if published"),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Delete a unit definition.

    - By default, only drafts can be deleted
    - Use force=true to delete published units
    """
    from pathlib import Path

    db = await get_unit_database()
    deleted = False

    if db:
        try:
            existing = await db.get_unit_definition(unit_id)
            if existing:
                status = existing.get("status", "draft")
                if status == "published" and not force:
                    raise HTTPException(
                        status_code=400,
                        detail="Veroeffentlichte Units koennen nicht geloescht werden. Verwende force=true."
                    )
                await db.delete_unit_definition(unit_id)
                deleted = True
                logger.info(f"Unit deleted from database: {unit_id}")
        except HTTPException:
            raise
        except Exception as e:
            logger.warning(f"Database delete failed: {e}")

    # Also check JSON file
    json_path = Path(__file__).parent / "data" / "units" / f"{unit_id}.json"
    if json_path.exists():
        json_path.unlink()
        deleted = True
        logger.info(f"Unit JSON deleted: {json_path}")

    if not deleted:
        raise HTTPException(status_code=404, detail=f"Unit nicht gefunden: {unit_id}")

    return {"success": True, "unit_id": unit_id, "message": "Unit geloescht"}


@router.post("/definitions/validate", response_model=ValidationResult)
async def validate_unit(
    unit_data: Dict[str, Any],
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> ValidationResult:
    """
    Validate a unit definition without saving.

    Returns validation result with errors and warnings.
    """
    return validate_unit_definition(unit_data)
@@ -0,0 +1,204 @@
# ==============================================
# Breakpilot Drive - Unit API Helpers
# ==============================================
# Auth, database, token, and validation helpers for the Unit API.
# Extracted from unit_api.py for file-size compliance.

from fastapi import HTTPException, Request
from typing import Optional, Dict, Any, List
from datetime import datetime, timedelta
import os
import logging
import jwt

from .models import ValidationError, ValidationResult

logger = logging.getLogger(__name__)

# Feature flags
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
REQUIRE_AUTH = os.getenv("GAME_REQUIRE_AUTH", "false").lower() == "true"
SECRET_KEY = os.getenv("JWT_SECRET_KEY", "dev-secret-key-change-in-production")


# ==============================================
# Auth Dependency (reuse from game_api)
# ==============================================

async def get_optional_current_user(request: Request) -> Optional[Dict[str, Any]]:
    """Optional auth dependency for Unit API."""
    if not REQUIRE_AUTH:
        return None

    try:
        from auth import get_current_user
        return await get_current_user(request)
    except ImportError:
        logger.warning("Auth module not available")
        return None
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Auth error: {e}")
        raise HTTPException(status_code=401, detail="Authentication failed")


# ==============================================
# Database Integration
# ==============================================

_unit_db = None

async def get_unit_database():
    """Get unit database instance with lazy initialization."""
    global _unit_db
    if not USE_DATABASE:
        return None
    if _unit_db is None:
        try:
            from unit.database import get_unit_db
            _unit_db = await get_unit_db()
            logger.info("Unit database initialized")
        except ImportError:
            logger.warning("Unit database module not available")
        except Exception as e:
            logger.warning(f"Unit database not available: {e}")
    return _unit_db


# ==============================================
# Token Helpers
# ==============================================

def create_session_token(session_id: str, student_id: str, expires_hours: int = 4) -> str:
    """Create a JWT session token for telemetry authentication."""
    payload = {
        "session_id": session_id,
        "student_id": student_id,
        "exp": datetime.utcnow() + timedelta(hours=expires_hours),
        "iat": datetime.utcnow(),
    }
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


def verify_session_token(token: str) -> Optional[Dict[str, Any]]:
    """Verify a session token and return payload."""
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError:
        return None


async def get_session_from_token(request: Request) -> Optional[Dict[str, Any]]:
    """Extract and verify session from Authorization header."""
    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Bearer "):
        return None
    token = auth_header[7:]
    return verify_session_token(token)
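
# Round trip of the two helpers above (usage sketch; both sides must share
# the JWT_SECRET_KEY environment variable for verification to succeed):
#
#   token = create_session_token("sess-1", "student-42", expires_hours=1)
#   verify_session_token(token)      # -> {"session_id": "sess-1", "student_id": "student-42", ...}
#   verify_session_token("garbage")  # -> None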


# ==============================================
# Validation
# ==============================================

def validate_unit_definition(unit_data: Dict[str, Any]) -> ValidationResult:
    """
    Validate a unit definition structure.

    Returns validation result with errors and warnings.
    """
    errors: List[ValidationError] = []
    warnings: List[ValidationError] = []

    # Required fields
    if not unit_data.get("unit_id"):
        errors.append(ValidationError(field="unit_id", message="unit_id ist erforderlich"))

    if not unit_data.get("template"):
        errors.append(ValidationError(field="template", message="template ist erforderlich"))
    elif unit_data["template"] not in ["flight_path", "station_loop"]:
        errors.append(ValidationError(
            field="template",
            message="template muss 'flight_path' oder 'station_loop' sein"
        ))

    # Validate stops
    stops = unit_data.get("stops", [])
    if not stops:
        errors.append(ValidationError(field="stops", message="Mindestens 1 Stop erforderlich"))
    else:
        # Check minimum stops for flight_path
        if unit_data.get("template") == "flight_path" and len(stops) < 3:
            warnings.append(ValidationError(
                field="stops",
                message="FlightPath sollte mindestens 3 Stops haben",
                severity="warning"
            ))

        # Validate each stop
        stop_ids = set()
        for i, stop in enumerate(stops):
            if not stop.get("stop_id"):
                errors.append(ValidationError(
                    field=f"stops[{i}].stop_id",
                    message=f"Stop {i}: stop_id fehlt"
                ))
            else:
                if stop["stop_id"] in stop_ids:
                    errors.append(ValidationError(
                        field=f"stops[{i}].stop_id",
                        message=f"Stop {i}: Doppelte stop_id '{stop['stop_id']}'"
                    ))
                stop_ids.add(stop["stop_id"])

            # Check interaction type
            interaction = stop.get("interaction", {})
            if not interaction.get("type"):
                errors.append(ValidationError(
                    field=f"stops[{i}].interaction.type",
                    message=f"Stop {stop.get('stop_id', i)}: Interaktionstyp fehlt"
                ))
            elif interaction["type"] not in [
                "aim_and_pass", "slider_adjust", "slider_equivalence",
                "sequence_arrange", "toggle_switch", "drag_match",
                "error_find", "transfer_apply"
            ]:
                warnings.append(ValidationError(
                    field=f"stops[{i}].interaction.type",
                    message=f"Stop {stop.get('stop_id', i)}: Unbekannter Interaktionstyp '{interaction['type']}'",
                    severity="warning"
                ))

            # Check for label
            if not stop.get("label"):
                warnings.append(ValidationError(
                    field=f"stops[{i}].label",
                    message=f"Stop {stop.get('stop_id', i)}: Label fehlt",
                    severity="warning"
                ))

    # Validate duration
    duration = unit_data.get("duration_minutes", 0)
    if duration < 3 or duration > 20:
        warnings.append(ValidationError(
            field="duration_minutes",
            message="Dauer sollte zwischen 3 und 20 Minuten liegen",
            severity="warning"
        ))

    # Validate difficulty
    if unit_data.get("difficulty") and unit_data["difficulty"] not in ["base", "advanced"]:
        warnings.append(ValidationError(
            field="difficulty",
            message="difficulty sollte 'base' oder 'advanced' sein",
            severity="warning"
        ))

    return ValidationResult(
        valid=len(errors) == 0,
        errors=errors,
        warnings=warnings
    )
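
# Example run of validate_unit_definition (illustrative values): one valid
# stop without a label in a flight_path unit passes with no errors but two
# warnings (fewer than 3 stops, missing label):
#
#   result = validate_unit_definition({
#       "unit_id": "demo",
#       "template": "flight_path",
#       "duration_minutes": 8,
#       "stops": [{"stop_id": "s1", "interaction": {"type": "aim_and_pass"}}],
#   })
#   assert result.valid and len(result.warnings) == 2
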
@@ -0,0 +1,178 @@
from __future__ import annotations
from pydantic import BaseModel, Field
from typing import List, Dict, Optional
from pathlib import Path
from datetime import datetime
import uuid
import json
import threading

# Base directory for worksheets & learning units
BASE_DIR = Path.home() / "Arbeitsblaetter"
LEARNING_UNITS_DIR = BASE_DIR / "Lerneinheiten"
LEARNING_UNITS_FILE = LEARNING_UNITS_DIR / "learning_units.json"

# Thread lock to keep file access safe
_lock = threading.Lock()


class LearningUnitBase(BaseModel):
    title: str = Field(..., description="Title of the learning unit, e.g. 'Das Auge – Klasse 7'")
    description: Optional[str] = Field(None, description="Free-text description")
    topic: Optional[str] = Field(None, description="Short topic, e.g. 'Auge'")
    grade_level: Optional[str] = Field(None, description="Grade level, e.g. '7'")
    language: Optional[str] = Field("de", description="Primary language of the learning unit (e.g. 'de', 'tr')")
    worksheet_files: List[str] = Field(
        default_factory=list,
        description="List of assigned worksheet files (basenames or paths)"
    )
    status: str = Field(
        "raw",
        description="Pipeline status: raw, cleaned, qa_generated, mc_generated, cloze_generated"
    )


class LearningUnitCreate(LearningUnitBase):
    """Payload for creating a new learning unit."""
    pass


class LearningUnitUpdate(BaseModel):
    """Partial update for a learning unit."""
    title: Optional[str] = None
    description: Optional[str] = None
    topic: Optional[str] = None
    grade_level: Optional[str] = None
    language: Optional[str] = None
    worksheet_files: Optional[List[str]] = None
    status: Optional[str] = None


class LearningUnit(LearningUnitBase):
    id: str
    created_at: datetime
    updated_at: datetime

    @classmethod
    def from_dict(cls, data: Dict) -> "LearningUnit":
        data = data.copy()
        if isinstance(data.get("created_at"), str):
            data["created_at"] = datetime.fromisoformat(data["created_at"])
        if isinstance(data.get("updated_at"), str):
            data["updated_at"] = datetime.fromisoformat(data["updated_at"])
        return cls(**data)

    def to_dict(self) -> Dict:
        d = self.dict()
        d["created_at"] = self.created_at.isoformat()
        d["updated_at"] = self.updated_at.isoformat()
        return d


def _ensure_storage():
    """Ensure the directory and the JSON file exist."""
    LEARNING_UNITS_DIR.mkdir(parents=True, exist_ok=True)
    if not LEARNING_UNITS_FILE.exists():
        with LEARNING_UNITS_FILE.open("w", encoding="utf-8") as f:
            json.dump({}, f)


def _load_all_units() -> Dict[str, Dict]:
    _ensure_storage()
    with LEARNING_UNITS_FILE.open("r", encoding="utf-8") as f:
        try:
            data = json.load(f)
            if not isinstance(data, dict):
                return {}
            return data
        except json.JSONDecodeError:
            return {}


def _save_all_units(raw: Dict[str, Dict]) -> None:
    _ensure_storage()
    with LEARNING_UNITS_FILE.open("w", encoding="utf-8") as f:
        json.dump(raw, f, ensure_ascii=False, indent=2)


def list_learning_units() -> List[LearningUnit]:
    with _lock:
        raw = _load_all_units()
        return [LearningUnit.from_dict(v) for v in raw.values()]


def get_learning_unit(unit_id: str) -> Optional[LearningUnit]:
    with _lock:
        raw = _load_all_units()
        data = raw.get(unit_id)
        if not data:
            return None
        return LearningUnit.from_dict(data)


def create_learning_unit(payload: LearningUnitCreate) -> LearningUnit:
    now = datetime.utcnow()
    lu = LearningUnit(
        id=str(uuid.uuid4()),
        created_at=now,
        updated_at=now,
        **payload.dict()
    )
    with _lock:
        raw = _load_all_units()
        raw[lu.id] = lu.to_dict()
        _save_all_units(raw)
    return lu


def update_learning_unit(unit_id: str, payload: LearningUnitUpdate) -> Optional[LearningUnit]:
    with _lock:
        raw = _load_all_units()
        existing = raw.get(unit_id)
        if not existing:
            return None

        lu = LearningUnit.from_dict(existing)
        update_data = payload.dict(exclude_unset=True)

        for field, value in update_data.items():
            setattr(lu, field, value)

        lu.updated_at = datetime.utcnow()
        raw[lu.id] = lu.to_dict()
        _save_all_units(raw)
        return lu


def delete_learning_unit(unit_id: str) -> bool:
    with _lock:
        raw = _load_all_units()
        if unit_id not in raw:
            return False
        del raw[unit_id]
        _save_all_units(raw)
        return True


def attach_worksheets(unit_id: str, worksheet_files: List[str]) -> Optional[LearningUnit]:
    """
    Attach a list of worksheet files to an existing learning unit.
    Duplicate entries are avoided.
    """
    with _lock:
        raw = _load_all_units()
        existing = raw.get(unit_id)
        if not existing:
            return None

        lu = LearningUnit.from_dict(existing)
        current_set = set(lu.worksheet_files)
        for f in worksheet_files:
            current_set.add(f)
        lu.worksheet_files = sorted(current_set)
        lu.updated_at = datetime.utcnow()

        raw[lu.id] = lu.to_dict()
        _save_all_units(raw)
        return lu
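
# Usage sketch for the JSON-backed store above (file names are made up;
# everything persists in ~/Arbeitsblaetter/Lerneinheiten/learning_units.json):
#
#   lu = create_learning_unit(LearningUnitCreate(title="Das Auge – Klasse 7", topic="Auge"))
#   attach_worksheets(lu.id, ["auge_1.pdf", "auge_1.pdf"])  # duplicates collapse
#   assert get_learning_unit(lu.id).worksheet_files == ["auge_1.pdf"]
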
@@ -0,0 +1,376 @@
from typing import List, Dict, Any, Optional
from datetime import datetime
from pathlib import Path
import json
import os
import logging

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel

from .learning import (
    LearningUnit,
    LearningUnitCreate,
    LearningUnitUpdate,
    list_learning_units,
    get_learning_unit,
    create_learning_unit,
    update_learning_unit,
    delete_learning_unit,
)

logger = logging.getLogger(__name__)


router = APIRouter(
    prefix="/learning-units",
    tags=["learning-units"],
)


# ---------- Payload models for the frontend ----------


class LearningUnitCreatePayload(BaseModel):
    """
    Payload exactly as it arrives from the frontend:
    {
        "student": "...",
        "subject": "...",
        "title": "...",
        "grade": "7a"
    }
    """
    student: Optional[str] = None
    subject: Optional[str] = None
    title: Optional[str] = None
    grade: Optional[str] = None


class AttachWorksheetsPayload(BaseModel):
    worksheet_files: List[str]


class RemoveWorksheetPayload(BaseModel):
    worksheet_file: str


class GenerateFromAnalysisPayload(BaseModel):
    analysis_data: Dict[str, Any]
    num_questions: int = 8


# ---------- Helper: backend model -> frontend object ----------


def unit_to_frontend_dict(lu: LearningUnit) -> Dict[str, Any]:
    """
    Convert a LearningUnit into the format the frontend expects.
    The important fields are:
    - id
    - label (visible name)
    - meta (subtitle line)
    - worksheet_files (list of file names)
    """
    label = lu.title or "Lerneinheit"

    # Meta text, e.g. "Thema: Auge · Klasse: 7a · angelegt am 10.12.2025"
    meta_parts: List[str] = []
    if lu.topic:
        meta_parts.append(f"Thema: {lu.topic}")
    if lu.grade_level:
        meta_parts.append(f"Klasse: {lu.grade_level}")
    created_str = lu.created_at.strftime("%d.%m.%Y")
    meta_parts.append(f"angelegt am {created_str}")

    meta = " · ".join(meta_parts)

    return {
        "id": lu.id,
        "label": label,
        "meta": meta,
        "title": lu.title,
        "topic": lu.topic,
        "grade_level": lu.grade_level,
        "language": lu.language,
        "status": lu.status,
        "worksheet_files": lu.worksheet_files,
        "created_at": lu.created_at.isoformat(),
        "updated_at": lu.updated_at.isoformat(),
    }


# ---------- Endpoints ----------


@router.get("/", response_model=List[Dict[str, Any]])
def api_list_learning_units():
    """List all learning units for the frontend."""
    units = list_learning_units()
    return [unit_to_frontend_dict(u) for u in units]


@router.post("/", response_model=Dict[str, Any])
def api_create_learning_unit(payload: LearningUnitCreatePayload):
    """
    Create a new learning unit.
    Maps the frontend payload (student/subject/title/grade)
    onto the generic LearningUnit model.
    """

    # At least one of the fields must be set
    if not (payload.student or payload.subject or payload.title):
        raise HTTPException(
            status_code=400,
            detail="Bitte mindestens Schüler/in, Fach oder Thema angeben.",
        )

    # Determine title/topic
    # visible title: prefer the topic (title), otherwise a combination
    if payload.title:
        title = payload.title
    else:
        parts = []
        if payload.subject:
            parts.append(payload.subject)
        if payload.student:
            parts.append(payload.student)
        title = " – ".join(parts) if parts else "Lerneinheit"

    topic = payload.title or payload.subject or None
    grade_level = payload.grade or None

    lu_create = LearningUnitCreate(
        title=title,
        description=None,
        topic=topic,
        grade_level=grade_level,
        language="de",
        worksheet_files=[],
        status="raw",
    )

    lu = create_learning_unit(lu_create)
    return unit_to_frontend_dict(lu)


@router.post("/{unit_id}/attach-worksheets", response_model=Dict[str, Any])
def api_attach_worksheets(unit_id: str, payload: AttachWorksheetsPayload):
    """
    Add one or more worksheets to the learning unit.
    """
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    files_to_add = [f for f in payload.worksheet_files if f not in lu.worksheet_files]
    if files_to_add:
        new_list = lu.worksheet_files + files_to_add
        update = LearningUnitUpdate(worksheet_files=new_list)
        lu = update_learning_unit(unit_id, update)
        if not lu:
            raise HTTPException(status_code=500, detail="Lerneinheit konnte nicht aktualisiert werden.")

    return unit_to_frontend_dict(lu)


@router.post("/{unit_id}/remove-worksheet", response_model=Dict[str, Any])
def api_remove_worksheet(unit_id: str, payload: RemoveWorksheetPayload):
    """
    Remove exactly one worksheet from the learning unit.
    """
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    if payload.worksheet_file not in lu.worksheet_files:
        # Nothing to do, but not an error - just return unchanged
        return unit_to_frontend_dict(lu)

    new_list = [f for f in lu.worksheet_files if f != payload.worksheet_file]
    update = LearningUnitUpdate(worksheet_files=new_list)
    lu = update_learning_unit(unit_id, update)
    if not lu:
        raise HTTPException(status_code=500, detail="Lerneinheit konnte nicht aktualisiert werden.")

    return unit_to_frontend_dict(lu)


@router.delete("/{unit_id}")
def api_delete_learning_unit(unit_id: str):
    """
    Delete a learning unit completely (currently not yet used by the frontend).
    """
    ok = delete_learning_unit(unit_id)
    if not ok:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")
    return {"status": "deleted", "id": unit_id}


# ---------- Generator endpoints ----------

LERNEINHEITEN_DIR = os.path.expanduser("~/Arbeitsblaetter/Lerneinheiten")


def _save_analysis_and_get_path(unit_id: str, analysis_data: Dict[str, Any]) -> Path:
    """Save analysis_data to disk and return the path."""
    os.makedirs(LERNEINHEITEN_DIR, exist_ok=True)
    path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_analyse.json"
    with open(path, "w", encoding="utf-8") as f:
        json.dump(analysis_data, f, ensure_ascii=False, indent=2)
    return path


@router.post("/{unit_id}/generate-qa")
def api_generate_qa(unit_id: str, payload: GenerateFromAnalysisPayload):
    """Generate Q&A items with Leitner fields from analysis data."""
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)

    try:
        from ai_processing.qa_generator import generate_qa_from_analysis
        qa_path = generate_qa_from_analysis(analysis_path, num_questions=payload.num_questions)
        with open(qa_path, "r", encoding="utf-8") as f:
            qa_data = json.load(f)

        # Update unit status
        update_learning_unit(unit_id, LearningUnitUpdate(status="qa_generated"))
        logger.info(f"Generated QA for unit {unit_id}: {len(qa_data.get('qa_items', []))} items")
        return qa_data
    except Exception as e:
        logger.error(f"QA generation failed for {unit_id}: {e}")
        raise HTTPException(status_code=500, detail=f"QA-Generierung fehlgeschlagen: {e}")


@router.post("/{unit_id}/generate-mc")
def api_generate_mc(unit_id: str, payload: GenerateFromAnalysisPayload):
    """Generate multiple choice questions from analysis data."""
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)

    try:
        from ai_processing.mc_generator import generate_mc_from_analysis
        mc_path = generate_mc_from_analysis(analysis_path, num_questions=payload.num_questions)
        with open(mc_path, "r", encoding="utf-8") as f:
            mc_data = json.load(f)

        update_learning_unit(unit_id, LearningUnitUpdate(status="mc_generated"))
        logger.info(f"Generated MC for unit {unit_id}: {len(mc_data.get('questions', []))} questions")
        return mc_data
    except Exception as e:
        logger.error(f"MC generation failed for {unit_id}: {e}")
        raise HTTPException(status_code=500, detail=f"MC-Generierung fehlgeschlagen: {e}")


@router.post("/{unit_id}/generate-cloze")
def api_generate_cloze(unit_id: str, payload: GenerateFromAnalysisPayload):
    """Generate cloze (fill-in-the-blank) items from analysis data."""
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    analysis_path = _save_analysis_and_get_path(unit_id, payload.analysis_data)

    try:
        from ai_processing.cloze_generator import generate_cloze_from_analysis
        cloze_path = generate_cloze_from_analysis(analysis_path)
        with open(cloze_path, "r", encoding="utf-8") as f:
            cloze_data = json.load(f)

        update_learning_unit(unit_id, LearningUnitUpdate(status="cloze_generated"))
        logger.info(f"Generated Cloze for unit {unit_id}: {len(cloze_data.get('cloze_items', []))} items")
        return cloze_data
    except Exception as e:
        logger.error(f"Cloze generation failed for {unit_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Cloze-Generierung fehlgeschlagen: {e}")


@router.get("/{unit_id}/qa")
def api_get_qa(unit_id: str):
    """Get generated QA items for a unit."""
    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
    if not qa_path.exists():
        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
    with open(qa_path, "r", encoding="utf-8") as f:
        return json.load(f)


@router.get("/{unit_id}/mc")
def api_get_mc(unit_id: str):
    """Get generated MC questions for a unit."""
    mc_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_mc.json"
    if not mc_path.exists():
        raise HTTPException(status_code=404, detail="Keine MC-Daten gefunden.")
    with open(mc_path, "r", encoding="utf-8") as f:
        return json.load(f)


@router.get("/{unit_id}/cloze")
def api_get_cloze(unit_id: str):
    """Get generated cloze items for a unit."""
    cloze_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_cloze.json"
    if not cloze_path.exists():
        raise HTTPException(status_code=404, detail="Keine Cloze-Daten gefunden.")
    with open(cloze_path, "r", encoding="utf-8") as f:
        return json.load(f)


@router.post("/{unit_id}/leitner/update")
def api_update_leitner(unit_id: str, item_id: str, correct: bool):
    """Update Leitner progress for a QA item."""
    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
    if not qa_path.exists():
        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
    try:
        from ai_processing.qa_generator import update_leitner_progress
        result = update_leitner_progress(qa_path, item_id, correct)
        return result
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/{unit_id}/leitner/next")
def api_get_next_review(unit_id: str, limit: int = 5):
    """Get next Leitner review items."""
    qa_path = Path(LERNEINHEITEN_DIR) / f"{unit_id}_qa.json"
    if not qa_path.exists():
        raise HTTPException(status_code=404, detail="Keine QA-Daten gefunden.")
    try:
        from ai_processing.qa_generator import get_next_review_items
        items = get_next_review_items(qa_path, limit=limit)
        return {"items": items, "count": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
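
# The Leitner helpers imported above (update_leitner_progress,
# get_next_review_items) live in ai_processing.qa_generator, which is not part
# of this diff. For orientation, the classic Leitner box step they are built
# around looks roughly like this (a sketch, not the actual implementation):
def _leitner_step_sketch(box: int, correct: bool, max_box: int = 5) -> int:
    # a correct answer promotes the item one box, a wrong answer resets it to box 1
    return min(box + 1, max_box) if correct else 1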


class StoryGeneratePayload(BaseModel):
    vocabulary: List[Dict[str, Any]]
    language: str = "en"
    grade_level: str = "5-8"


@router.post("/{unit_id}/generate-story")
def api_generate_story(unit_id: str, payload: StoryGeneratePayload):
    """Generate a short story using vocabulary words."""
    lu = get_learning_unit(unit_id)
    if not lu:
        raise HTTPException(status_code=404, detail="Lerneinheit nicht gefunden.")

    try:
        from story_generator import generate_story
        result = generate_story(
            vocabulary=payload.vocabulary,
            language=payload.language,
            grade_level=payload.grade_level,
        )
        return result
    except Exception as e:
        logger.error(f"Story generation failed for {unit_id}: {e}")
        raise HTTPException(status_code=500, detail=f"Story-Generierung fehlgeschlagen: {e}")
@@ -0,0 +1,149 @@
# ==============================================
# Breakpilot Drive - Unit API Models
# ==============================================
# Pydantic models for the Unit API.
# Extracted from unit_api.py for file-size compliance.

from pydantic import BaseModel, Field
from typing import List, Optional, Dict, Any
from datetime import datetime


class UnitDefinitionResponse(BaseModel):
    """Unit definition response"""
    unit_id: str
    template: str
    version: str
    locale: List[str]
    grade_band: List[str]
    duration_minutes: int
    difficulty: str
    definition: Dict[str, Any]


class CreateSessionRequest(BaseModel):
    """Request to create a unit session"""
    unit_id: str
    student_id: str
    locale: str = "de-DE"
    difficulty: str = "base"


class SessionResponse(BaseModel):
    """Response after creating a session"""
    session_id: str
    unit_definition_url: str
    session_token: str
    telemetry_endpoint: str
    expires_at: datetime


class TelemetryEvent(BaseModel):
    """Single telemetry event"""
    ts: Optional[str] = None
    type: str = Field(..., alias="type")
    stop_id: Optional[str] = None
    metrics: Optional[Dict[str, Any]] = None

    class Config:
        populate_by_name = True


class TelemetryPayload(BaseModel):
    """Batch telemetry payload"""
    session_id: str
    events: List[TelemetryEvent]
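
# A batch as a client would send it (illustrative values; with pydantic v2,
# populate_by_name also allows constructing TelemetryEvent(type=...) directly):
#
#   TelemetryPayload.model_validate({
#       "session_id": "sess-1",
#       "events": [
#           {"type": "stop_completed", "stop_id": "stop_1",
#            "metrics": {"time_sec": 42.5, "hints_used": 1}},
#       ],
#   })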


class TelemetryResponse(BaseModel):
    """Response after receiving telemetry"""
    accepted: int


class PostcheckAnswer(BaseModel):
    """Single postcheck answer"""
    question_id: str
    answer: str


class CompleteSessionRequest(BaseModel):
    """Request to complete a session"""
    postcheck_answers: Optional[List[PostcheckAnswer]] = None


class SessionSummaryResponse(BaseModel):
    """Response with session summary"""
    summary: Dict[str, Any]
    next_recommendations: Dict[str, Any]


class UnitListItem(BaseModel):
    """Unit list item"""
    unit_id: str
    template: str
    difficulty: str
    duration_minutes: int
    locale: List[str]
    grade_band: List[str]


class RecommendedUnit(BaseModel):
    """Recommended unit with reason"""
    unit_id: str
    template: str
    difficulty: str
    reason: str


class CreateUnitRequest(BaseModel):
    """Request to create a new unit definition"""
    unit_id: str = Field(..., description="Unique unit identifier")
    template: str = Field(..., description="Template type: flight_path or station_loop")
    version: str = Field(default="1.0.0", description="Version string")
    locale: List[str] = Field(default=["de-DE"], description="Supported locales")
    grade_band: List[str] = Field(default=["5", "6", "7"], description="Target grade levels")
    duration_minutes: int = Field(default=8, ge=3, le=20, description="Expected duration")
    difficulty: str = Field(default="base", description="Difficulty level: base or advanced")
    subject: Optional[str] = Field(default=None, description="Subject area")
    topic: Optional[str] = Field(default=None, description="Topic within subject")
    learning_objectives: List[str] = Field(default=[], description="Learning objectives")
    stops: List[Dict[str, Any]] = Field(default=[], description="Unit stops/stations")
    precheck: Optional[Dict[str, Any]] = Field(default=None, description="Pre-check configuration")
    postcheck: Optional[Dict[str, Any]] = Field(default=None, description="Post-check configuration")
    teacher_controls: Optional[Dict[str, Any]] = Field(default=None, description="Teacher control settings")
    assets: Optional[Dict[str, Any]] = Field(default=None, description="Asset configuration")
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Additional metadata")
    status: str = Field(default="draft", description="Publication status: draft or published")


class UpdateUnitRequest(BaseModel):
    """Request to update an existing unit definition"""
    version: Optional[str] = None
    locale: Optional[List[str]] = None
    grade_band: Optional[List[str]] = None
    duration_minutes: Optional[int] = Field(default=None, ge=3, le=20)
    difficulty: Optional[str] = None
    subject: Optional[str] = None
    topic: Optional[str] = None
    learning_objectives: Optional[List[str]] = None
    stops: Optional[List[Dict[str, Any]]] = None
    precheck: Optional[Dict[str, Any]] = None
    postcheck: Optional[Dict[str, Any]] = None
    teacher_controls: Optional[Dict[str, Any]] = None
    assets: Optional[Dict[str, Any]] = None
    metadata: Optional[Dict[str, Any]] = None
    status: Optional[str] = None


class ValidationError(BaseModel):
    """Single validation error"""
    field: str
    message: str
    severity: str = "error"  # error or warning


class ValidationResult(BaseModel):
    """Result of unit validation"""
    valid: bool
    errors: List[ValidationError] = []
    warnings: List[ValidationError] = []
@@ -0,0 +1,494 @@
|
|||||||
|
# ==============================================
|
||||||
|
# Breakpilot Drive - Unit API Routes
|
||||||
|
# ==============================================
|
||||||
|
# Endpoints for listing/getting definitions, sessions, telemetry,
|
||||||
|
# recommendations, and analytics.
|
||||||
|
# CRUD definition routes are in unit_definition_routes.py.
|
||||||
|
# Extracted from unit_api.py for file-size compliance.
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, Query, Depends, Request
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
import uuid
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .models import (
|
||||||
|
UnitDefinitionResponse,
|
||||||
|
CreateSessionRequest,
|
||||||
|
SessionResponse,
|
||||||
|
TelemetryPayload,
|
||||||
|
TelemetryResponse,
|
||||||
|
CompleteSessionRequest,
|
||||||
|
SessionSummaryResponse,
|
||||||
|
UnitListItem,
|
||||||
|
RecommendedUnit,
|
||||||
|
)
|
||||||
|
from .helpers import (
|
||||||
|
get_optional_current_user,
|
||||||
|
get_unit_database,
|
||||||
|
create_session_token,
|
||||||
|
get_session_from_token,
|
||||||
|
REQUIRE_AUTH,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/units", tags=["Breakpilot Units"])
|
||||||
|
|
||||||
|
|
||||||
|
# ==============================================
|
||||||
|
# Definition List/Get Endpoints
|
||||||
|
# ==============================================
|
||||||
|
|
||||||
|
@router.get("/definitions", response_model=List[UnitListItem])
|
||||||
|
async def list_unit_definitions(
|
||||||
|
template: Optional[str] = Query(None, description="Filter by template: flight_path, station_loop"),
|
||||||
|
grade: Optional[str] = Query(None, description="Filter by grade level"),
|
||||||
|
locale: str = Query("de-DE", description="Filter by locale"),
|
||||||
|
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
||||||
|
) -> List[UnitListItem]:
|
||||||
|
"""
|
||||||
|
List available unit definitions.
|
||||||
|
|
||||||
|
Returns published units matching the filter criteria.
|
||||||
|
"""
|
||||||
|
db = await get_unit_database()
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
units = await db.list_units(
|
||||||
|
template=template,
|
||||||
|
grade=grade,
|
||||||
|
locale=locale,
|
||||||
|
published_only=True
|
||||||
|
)
|
||||||
|
return [
|
||||||
|
UnitListItem(
|
||||||
|
unit_id=u["unit_id"],
|
||||||
|
template=u["template"],
|
||||||
|
difficulty=u["difficulty"],
|
||||||
|
duration_minutes=u["duration_minutes"],
|
||||||
|
locale=u["locale"],
|
||||||
|
grade_band=u["grade_band"],
|
||||||
|
)
|
||||||
|
for u in units
|
||||||
|
]
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to list units: {e}")
|
||||||
|
|
||||||
|
# Fallback: return demo unit
|
||||||
|
return [
|
||||||
|
UnitListItem(
|
||||||
|
unit_id="demo_unit_v1",
|
||||||
|
template="flight_path",
|
||||||
|
difficulty="base",
|
||||||
|
duration_minutes=5,
|
||||||
|
locale=["de-DE"],
|
||||||
|
grade_band=["5", "6", "7"],
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/definitions/{unit_id}", response_model=UnitDefinitionResponse)
|
||||||
|
async def get_unit_definition(
|
||||||
|
unit_id: str,
|
||||||
|
user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
|
||||||
|
) -> UnitDefinitionResponse:
|
||||||
|
"""
|
||||||
|
Get a specific unit definition.
|
||||||
|
|
||||||
|
Returns the full unit configuration including stops, interactions, etc.
|
||||||
|
"""
|
||||||
|
db = await get_unit_database()
|
||||||
|
if db:
|
||||||
|
try:
|
||||||
|
unit = await db.get_unit_definition(unit_id)
|
||||||
|
if unit:
|
||||||
|
return UnitDefinitionResponse(
|
||||||
|
unit_id=unit["unit_id"],
|
||||||
|
template=unit["template"],
|
||||||
|
version=unit["version"],
|
||||||
|
locale=unit["locale"],
|
||||||
|
grade_band=unit["grade_band"],
|
||||||
|
duration_minutes=unit["duration_minutes"],
|
||||||
|
difficulty=unit["difficulty"],
|
||||||
|
definition=unit["definition"],
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get unit definition: {e}")
|
||||||
|
|
||||||
|
# Demo unit fallback
|
||||||
|
if unit_id == "demo_unit_v1":
|
||||||
|
return UnitDefinitionResponse(
|
||||||
|
unit_id="demo_unit_v1",
|
||||||
|
template="flight_path",
|
||||||
|
version="1.0.0",
|
||||||
|
locale=["de-DE"],
|
||||||
|
grade_band=["5", "6", "7"],
|
||||||
|
duration_minutes=5,
|
||||||
|
difficulty="base",
|
||||||
|
definition={
|
||||||
|
"unit_id": "demo_unit_v1",
|
||||||
|
"template": "flight_path",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"learning_objectives": ["Demo: Grundfunktion testen"],
|
||||||
|
"stops": [
|
||||||
|
{"stop_id": "stop_1", "label": {"de-DE": "Start"}, "interaction": {"type": "aim_and_pass"}},
|
||||||
|
{"stop_id": "stop_2", "label": {"de-DE": "Mitte"}, "interaction": {"type": "aim_and_pass"}},
|
||||||
|
{"stop_id": "stop_3", "label": {"de-DE": "Ende"}, "interaction": {"type": "aim_and_pass"}},
|
||||||
|
],
|
||||||
|
"teacher_controls": {"allow_skip": True, "allow_replay": True},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
raise HTTPException(status_code=404, detail=f"Unit not found: {unit_id}")


# ==============================================
# Session Endpoints
# ==============================================

@router.post("/sessions", response_model=SessionResponse)
async def create_unit_session(
    request_data: CreateSessionRequest,
    request: Request,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> SessionResponse:
    """
    Create a new unit session.

    - Validates unit exists
    - Creates session record
    - Returns session token for telemetry
    """
    session_id = str(uuid.uuid4())
    expires_at = datetime.utcnow() + timedelta(hours=4)

    # Validate unit exists
    db = await get_unit_database()
    if db:
        try:
            unit = await db.get_unit_definition(request_data.unit_id)
            if not unit:
                raise HTTPException(status_code=404, detail=f"Unit not found: {request_data.unit_id}")

            # Create session in database
            total_stops = len(unit.get("definition", {}).get("stops", []))
            await db.create_session(
                session_id=session_id,
                unit_id=request_data.unit_id,
                student_id=request_data.student_id,
                locale=request_data.locale,
                difficulty=request_data.difficulty,
                total_stops=total_stops,
            )
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"Failed to create session: {e}")
            # Continue with in-memory fallback

    # Create session token
    session_token = create_session_token(session_id, request_data.student_id)

    # Build definition URL
    base_url = str(request.base_url).rstrip("/")
    definition_url = f"{base_url}/api/units/definitions/{request_data.unit_id}"

    return SessionResponse(
        session_id=session_id,
        unit_definition_url=definition_url,
        session_token=session_token,
        telemetry_endpoint="/api/units/telemetry",
        expires_at=expires_at,
    )
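
# Usage sketch (illustrative): creating a session and keeping the returned
# token for later telemetry calls. Request fields mirror what the handler
# above reads from CreateSessionRequest; `httpx`, the base URL, and the
# student ID are assumptions about the client environment.
#
#     import httpx
#
#     async def start_session() -> tuple[str, str]:
#         async with httpx.AsyncClient() as client:
#             resp = await client.post(
#                 "http://localhost:8000/api/units/sessions",
#                 json={
#                     "unit_id": "demo_unit_v1",
#                     "student_id": "student-123",  # hypothetical ID
#                     "locale": "de-DE",
#                     "difficulty": "base",
#                 },
#             )
#             resp.raise_for_status()
#             data = resp.json()
#             return data["session_id"], data["session_token"]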


@router.post("/telemetry", response_model=TelemetryResponse)
async def receive_telemetry(
    payload: TelemetryPayload,
    request: Request,
) -> TelemetryResponse:
    """
    Receive batched telemetry events from the Unity client.

    - Validates session token
    - Stores events in database
    - Returns count of accepted events
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if session_data is None:
        # Allow without auth in dev mode
        if REQUIRE_AUTH:
            raise HTTPException(status_code=401, detail="Invalid or expired session token")
        logger.warning("Telemetry received without valid token (dev mode)")

    # Verify session_id matches
    if session_data and session_data.get("session_id") != payload.session_id:
        raise HTTPException(status_code=403, detail="Session ID mismatch")

    accepted = 0
    db = await get_unit_database()

    for event in payload.events:
        try:
            # Set timestamp if not provided
            timestamp = event.ts or datetime.utcnow().isoformat()

            if db:
                await db.store_telemetry_event(
                    session_id=payload.session_id,
                    event_type=event.type,
                    stop_id=event.stop_id,
                    timestamp=timestamp,
                    metrics=event.metrics,
                )

            accepted += 1
            logger.debug(f"Telemetry: {event.type} for session {payload.session_id}")

        except Exception as e:
            logger.error(f"Failed to store telemetry event: {e}")

    return TelemetryResponse(accepted=accepted)
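
# Usage sketch (illustrative): posting one batched telemetry payload. The
# event fields mirror what the handler reads (`type`, `stop_id`, `ts`,
# `metrics`); sending the session token as a bearer header is an assumption
# about how `get_session_from_token` expects to receive it.
#
#     import httpx
#
#     async def send_batch(session_id: str, token: str) -> int:
#         async with httpx.AsyncClient() as client:
#             resp = await client.post(
#                 "http://localhost:8000/api/units/telemetry",
#                 headers={"Authorization": f"Bearer {token}"},
#                 json={
#                     "session_id": session_id,
#                     "events": [
#                         {"type": "stop_reached", "stop_id": "stop_1",
#                          "ts": None, "metrics": {"attempts": 1}},
#                     ],
#                 },
#             )
#             resp.raise_for_status()
#             return resp.json()["accepted"]  # 1 if the event was stored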


@router.post("/sessions/{session_id}/complete", response_model=SessionSummaryResponse)
async def complete_session(
    session_id: str,
    request_data: CompleteSessionRequest,
    request: Request,
) -> SessionSummaryResponse:
    """
    Complete a unit session.

    - Processes postcheck answers if provided
    - Calculates learning gain
    - Returns summary and recommendations
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if REQUIRE_AUTH and session_data is None:
        raise HTTPException(status_code=401, detail="Invalid or expired session token")

    db = await get_unit_database()
    summary = {}
    recommendations = {}

    if db:
        try:
            # Get session data
            session = await db.get_session(session_id)
            if not session:
                raise HTTPException(status_code=404, detail="Session not found")

            # Calculate postcheck score if answers provided
            postcheck_score = None
            if request_data.postcheck_answers:
                # Simple scoring: count correct answers.
                # In production, this would validate against a question bank.
                postcheck_score = len(request_data.postcheck_answers) * 0.2  # Placeholder
                postcheck_score = min(postcheck_score, 1.0)

            # Complete session in database
            await db.complete_session(
                session_id=session_id,
                postcheck_score=postcheck_score,
            )

            # Get updated session summary
            session = await db.get_session(session_id)

            # Calculate learning gain
            pre_score = session.get("precheck_score")
            post_score = session.get("postcheck_score")
            learning_gain = None
            if pre_score is not None and post_score is not None:
                learning_gain = post_score - pre_score

            summary = {
                "session_id": session_id,
                "unit_id": session.get("unit_id"),
                "duration_seconds": session.get("duration_seconds"),
                "completion_rate": session.get("completion_rate"),
                "precheck_score": pre_score,
                "postcheck_score": post_score,
                "pre_to_post_gain": learning_gain,
                "stops_completed": session.get("stops_completed"),
                "total_stops": session.get("total_stops"),
            }

            # Get recommendations
            recommendations = await db.get_recommendations(
                student_id=session.get("student_id"),
                completed_unit_id=session.get("unit_id"),
            )

        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"Failed to complete session: {e}")
            summary = {"session_id": session_id, "error": str(e)}

    else:
        # Fallback summary
        summary = {
            "session_id": session_id,
            "duration_seconds": 0,
            "completion_rate": 1.0,
            "message": "Database not available",
        }

    return SessionSummaryResponse(
        summary=summary,
        next_recommendations=recommendations or {
            "h5p_activity_ids": [],
            "worksheet_pdf_url": None,
        },
    )
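
# Worked example of the placeholder scoring above: with precheck_score 0.4
# and five postcheck answers, postcheck_score = min(5 * 0.2, 1.0) = 1.0 and
# pre_to_post_gain = 1.0 - 0.4 = 0.6; with two answers it is
# min(2 * 0.2, 1.0) - 0.4 = 0.0. The placeholder counts answers rather than
# correctness, which is why the comment above defers real scoring to a
# question bank.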


@router.get("/sessions/{session_id}")
async def get_session(
    session_id: str,
    request: Request,
) -> Dict[str, Any]:
    """
    Get session details.

    Returns the current state of a session, including progress.
    """
    # Verify session token
    session_data = await get_session_from_token(request)
    if REQUIRE_AUTH and session_data is None:
        raise HTTPException(status_code=401, detail="Invalid or expired session token")

    db = await get_unit_database()
    if db:
        try:
            session = await db.get_session(session_id)
            if session:
                return session
        except Exception as e:
            logger.error(f"Failed to get session: {e}")

    raise HTTPException(status_code=404, detail="Session not found")


# ==============================================
# Recommendations & Analytics
# ==============================================

@router.get("/recommendations/{student_id}", response_model=List[RecommendedUnit])
async def get_recommendations(
    student_id: str,
    grade: Optional[str] = Query(None, description="Grade level filter"),
    locale: str = Query("de-DE", description="Locale filter"),
    limit: int = Query(5, ge=1, le=20),
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> List[RecommendedUnit]:
    """
    Get recommended units for a student.

    Based on completion status and performance.
    """
    db = await get_unit_database()
    if db:
        try:
            recommendations = await db.get_student_recommendations(
                student_id=student_id,
                grade=grade,
                locale=locale,
                limit=limit,
            )
            return [
                RecommendedUnit(
                    unit_id=r["unit_id"],
                    template=r["template"],
                    difficulty=r["difficulty"],
                    reason=r["reason"],
                )
                for r in recommendations
            ]
        except Exception as e:
            logger.error(f"Failed to get recommendations: {e}")

    # Fallback: recommend the demo unit
    return [
        RecommendedUnit(
            unit_id="demo_unit_v1",
            template="flight_path",
            difficulty="base",
            reason="Neu: Noch nicht gespielt",
        )
    ]
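
# Usage sketch (illustrative): querying recommendations for a student. When
# no database is available, the fallback above always returns the demo unit
# with reason "Neu: Noch nicht gespielt" ("New: not yet played"). Base URL
# and student ID are assumptions.
#
#     import asyncio
#     import httpx
#
#     async def show_recommendations() -> None:
#         async with httpx.AsyncClient() as client:
#             resp = await client.get(
#                 "http://localhost:8000/api/units/recommendations/student-123",
#                 params={"grade": "6", "locale": "de-DE", "limit": 3},
#             )
#             resp.raise_for_status()
#             for rec in resp.json():
#                 print(rec["unit_id"], rec["reason"])
#
#     asyncio.run(show_recommendations())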


@router.get("/analytics/student/{student_id}")
async def get_student_analytics(
    student_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Get unit analytics for a student.

    Includes completion rates, learning gains, and time spent.
    """
    db = await get_unit_database()
    if db:
        try:
            analytics = await db.get_student_unit_analytics(student_id)
            return analytics
        except Exception as e:
            logger.error(f"Failed to get analytics: {e}")

    return {
        "student_id": student_id,
        "units_attempted": 0,
        "units_completed": 0,
        "avg_completion_rate": 0.0,
        "avg_learning_gain": None,
        "total_minutes": 0,
    }


@router.get("/analytics/unit/{unit_id}")
async def get_unit_analytics(
    unit_id: str,
    user: Optional[Dict[str, Any]] = Depends(get_optional_current_user)
) -> Dict[str, Any]:
    """
    Get analytics for a specific unit.

    Shows aggregate performance across all students.
    """
    db = await get_unit_database()
    if db:
        try:
            analytics = await db.get_unit_performance(unit_id)
            return analytics
        except Exception as e:
            logger.error(f"Failed to get unit analytics: {e}")

    return {
        "unit_id": unit_id,
        "total_sessions": 0,
        "completed_sessions": 0,
        "completion_percent": 0.0,
        "avg_duration_minutes": 0,
        "avg_learning_gain": None,
    }


@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for the unit API."""
    db = await get_unit_database()
    db_status = "connected" if db else "disconnected"

    return {
        "status": "healthy",
        "service": "breakpilot-units",
        "database": db_status,
        "auth_required": REQUIRE_AUTH,
    }
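
# Usage sketch (illustrative): a liveness probe against the endpoint above,
# e.g. for a container health check. The base URL is an assumption.
#
#     import httpx
#
#     body = httpx.get("http://localhost:8000/api/units/health").json()
#     assert body["status"] == "healthy"
#     print(body["database"])  # "connected" or "disconnected"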