[split-required] Split 500-1000 LOC files across all services
backend-lehrer (5 files): - alerts_agent/db/repository.py (992 → 5), abitur_docs_api.py (956 → 3) - teacher_dashboard_api.py (951 → 3), services/pdf_service.py (916 → 3) - mail/mail_db.py (987 → 6) klausur-service (5 files): - legal_templates_ingestion.py (942 → 3), ocr_pipeline_postprocess.py (929 → 4) - ocr_pipeline_words.py (876 → 3), ocr_pipeline_ocr_merge.py (616 → 2) - KorrekturPage.tsx (956 → 6) website (5 pages): - mail (985 → 9), edu-search (958 → 8), mac-mini (950 → 7) - ocr-labeling (946 → 7), audit-workspace (871 → 4) studio-v2 (5 files + 1 deleted): - page.tsx (946 → 5), MessagesContext.tsx (925 → 4) - korrektur (914 → 6), worksheet-cleanup (899 → 6) - useVocabWorksheet.ts (888 → 3) - Deleted dead page-original.tsx (934 LOC) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -15,18 +15,24 @@ Dateinamen-Schema (NiBiS Niedersachsen):
|
||||
import logging
|
||||
import uuid
|
||||
import os
|
||||
import re
|
||||
import zipfile
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Optional
|
||||
from enum import Enum
|
||||
from typing import List, Optional, Dict, Any
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass
|
||||
|
||||
from fastapi import APIRouter, HTTPException, UploadFile, File, Form, BackgroundTasks
|
||||
from fastapi.responses import FileResponse
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from abitur_docs_models import (
|
||||
Bundesland, Fach, Niveau, DokumentTyp, VerarbeitungsStatus,
|
||||
DokumentCreate, DokumentUpdate, DokumentResponse, ImportResult,
|
||||
RecognitionResult, AbiturDokument,
|
||||
FACH_LABELS, DOKUMENT_TYP_LABELS,
|
||||
# Backwards-compatibility re-exports
|
||||
AbiturFach, Anforderungsniveau, DocumentMetadata, AbiturDokumentCompat,
|
||||
)
|
||||
from abitur_docs_recognition import parse_nibis_filename, to_dokument_response
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -39,364 +45,19 @@ router = APIRouter(
|
||||
# Flat on-disk storage for uploaded PDFs; created eagerly at import time.
# NOTE(review): /tmp is ephemeral — stored documents do not survive a reboot.
DOCS_DIR = Path("/tmp/abitur-docs")
DOCS_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enums
|
||||
# ============================================================================
|
||||
|
||||
class Bundesland(str, Enum):
    """German federal states (Bundesländer) with a centralized Abitur exam."""
    NIEDERSACHSEN = "niedersachsen"
    BAYERN = "bayern"
    BADEN_WUERTTEMBERG = "baden_wuerttemberg"
    NORDRHEIN_WESTFALEN = "nordrhein_westfalen"
    HESSEN = "hessen"
    SACHSEN = "sachsen"
    THUERINGEN = "thueringen"
    BERLIN = "berlin"
    HAMBURG = "hamburg"
    SCHLESWIG_HOLSTEIN = "schleswig_holstein"
    BREMEN = "bremen"
    BRANDENBURG = "brandenburg"
    MECKLENBURG_VORPOMMERN = "mecklenburg_vorpommern"
    SACHSEN_ANHALT = "sachsen_anhalt"
    RHEINLAND_PFALZ = "rheinland_pfalz"
    SAARLAND = "saarland"
|
||||
|
||||
|
||||
class Fach(str, Enum):
    """Abitur exam subjects (including vocational-track subjects)."""
    DEUTSCH = "deutsch"
    ENGLISCH = "englisch"
    MATHEMATIK = "mathematik"
    BIOLOGIE = "biologie"
    CHEMIE = "chemie"
    PHYSIK = "physik"
    GESCHICHTE = "geschichte"
    ERDKUNDE = "erdkunde"
    POLITIK_WIRTSCHAFT = "politik_wirtschaft"
    FRANZOESISCH = "franzoesisch"
    SPANISCH = "spanisch"
    LATEIN = "latein"
    GRIECHISCH = "griechisch"
    KUNST = "kunst"
    MUSIK = "musik"
    SPORT = "sport"
    INFORMATIK = "informatik"
    EV_RELIGION = "ev_religion"
    KATH_RELIGION = "kath_religion"
    WERTE_NORMEN = "werte_normen"
    BRC = "brc" # Betriebswirtschaft mit Rechnungswesen (business studies w/ accounting)
    BVW = "bvw" # Volkswirtschaft (economics)
    ERNAEHRUNG = "ernaehrung"
    MECHATRONIK = "mechatronik"
    GESUNDHEIT_PFLEGE = "gesundheit_pflege"
    PAEDAGOGIK_PSYCHOLOGIE = "paedagogik_psychologie"
|
||||
|
||||
|
||||
class Niveau(str, Enum):
    """Requirement level of the exam."""
    EA = "eA" # erhöhtes Anforderungsniveau — advanced level (Leistungskurs)
    GA = "gA" # grundlegendes Anforderungsniveau — basic level (Grundkurs)
|
||||
|
||||
|
||||
class DokumentTyp(str, Enum):
    """Kind of document within an exam package."""
    AUFGABE = "aufgabe"
    ERWARTUNGSHORIZONT = "erwartungshorizont"
    DECKBLATT = "deckblatt"
    MATERIAL = "material"
    HOERVERSTEHEN = "hoerverstehen" # listening comprehension (language subjects)
    SPRACHMITTLUNG = "sprachmittlung" # mediation task (language subjects)
    BEWERTUNGSBOGEN = "bewertungsbogen"
|
||||
|
||||
|
||||
class VerarbeitungsStatus(str, Enum):
    """Processing state of a document in the pipeline."""
    PENDING = "pending"
    PROCESSING = "processing"
    RECOGNIZED = "recognized" # AI/filename recognition produced metadata
    CONFIRMED = "confirmed" # a developer confirmed the metadata
    INDEXED = "indexed" # pushed into the vector store
    ERROR = "error"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Fach-Mapping für Dateinamen
|
||||
# ============================================================================
|
||||
|
||||
# Maps lowercase filename fragments to subjects. parse_nibis_filename checks
# these keys by substring containment against the name with "_"/"-" stripped,
# so short aliases ("bio", "franz", "mecha") cover common NiBiS abbreviations.
FACH_NAME_MAPPING = {
    "deutsch": Fach.DEUTSCH,
    "englisch": Fach.ENGLISCH,
    "mathe": Fach.MATHEMATIK,
    "mathematik": Fach.MATHEMATIK,
    "biologie": Fach.BIOLOGIE,
    "bio": Fach.BIOLOGIE,
    "chemie": Fach.CHEMIE,
    "physik": Fach.PHYSIK,
    "geschichte": Fach.GESCHICHTE,
    "erdkunde": Fach.ERDKUNDE,
    "geographie": Fach.ERDKUNDE,
    "politikwirtschaft": Fach.POLITIK_WIRTSCHAFT,
    "politik": Fach.POLITIK_WIRTSCHAFT,
    "franzoesisch": Fach.FRANZOESISCH,
    "franz": Fach.FRANZOESISCH,
    "spanisch": Fach.SPANISCH,
    "latein": Fach.LATEIN,
    "griechisch": Fach.GRIECHISCH,
    "kunst": Fach.KUNST,
    "musik": Fach.MUSIK,
    "sport": Fach.SPORT,
    "informatik": Fach.INFORMATIK,
    "evreligion": Fach.EV_RELIGION,
    "kathreligion": Fach.KATH_RELIGION,
    "wertenormen": Fach.WERTE_NORMEN,
    "brc": Fach.BRC,
    "bvw": Fach.BVW,
    "ernaehrung": Fach.ERNAEHRUNG,
    "mecha": Fach.MECHATRONIK,
    "mechatronik": Fach.MECHATRONIK,
    "technikmecha": Fach.MECHATRONIK,
    "gespfl": Fach.GESUNDHEIT_PFLEGE,
    "paedpsych": Fach.PAEDAGOGIK_PSYCHOLOGIE,
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Pydantic Models
|
||||
# ============================================================================
|
||||
|
||||
class DokumentCreate(BaseModel):
    """Payload for manually creating a document (metadata supplied by hand)."""
    bundesland: Bundesland
    fach: Fach
    jahr: int = Field(ge=2000, le=2100)  # exam year, sanity-bounded
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str] = None  # task number: I, II, III, 1, 2, etc.
|
||||
|
||||
|
||||
class DokumentUpdate(BaseModel):
    """Partial update of recognized metadata; only non-None fields are applied."""
    bundesland: Optional[Bundesland] = None
    fach: Optional[Fach] = None
    jahr: Optional[int] = None
    niveau: Optional[Niveau] = None
    typ: Optional[DokumentTyp] = None
    aufgaben_nummer: Optional[str] = None
    status: Optional[VerarbeitungsStatus] = None
|
||||
|
||||
|
||||
class DokumentResponse(BaseModel):
    """Public API representation of a stored document."""
    id: str
    dateiname: str  # sanitized storage filename (uuid + original extension)
    original_dateiname: str  # filename exactly as uploaded
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float  # filename-recognition confidence (0.0-1.0)
    file_path: str
    file_size: int
    indexed: bool  # True once pushed into the vector store
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class ImportResult(BaseModel):
    """Summary of a ZIP import run."""
    total_files: int  # PDFs considered (junk archive entries are skipped)
    recognized: int  # successfully recognized and stored
    errors: int
    documents: List[DokumentResponse]
|
||||
|
||||
|
||||
class RecognitionResult(BaseModel):
    """Outcome of filename-based metadata recognition."""
    success: bool  # True when at least fach and jahr were recognized
    bundesland: Optional[Bundesland]
    fach: Optional[Fach]
    jahr: Optional[int]
    niveau: Optional[Niveau]
    typ: Optional[DokumentTyp]
    aufgaben_nummer: Optional[str]
    confidence: float  # accumulated per-component score, capped at 1.0
    raw_filename: str  # the filename recognition was run against
    suggestions: List[Dict[str, Any]]

    @property
    def extracted(self) -> Dict[str, Any]:
        """Backwards-compatible property returning extracted values as dict."""
        # Only truthy fields are included; enum members are flattened to
        # their string values.
        result: Dict[str, Any] = {}
        if self.bundesland:
            result["bundesland"] = self.bundesland.value
        if self.fach:
            result["fach"] = self.fach.value
        if self.jahr:
            result["jahr"] = self.jahr
        if self.niveau:
            result["niveau"] = self.niveau.value
        if self.typ:
            result["typ"] = self.typ.value
        if self.aufgaben_nummer:
            result["aufgaben_nummer"] = self.aufgaben_nummer
        return result

    @property
    def method(self) -> str:
        """Backwards-compatible property for recognition method.

        Always the fixed string — filename parsing is the only strategy here.
        """
        return "filename_pattern"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Internal Data Classes
|
||||
# ============================================================================
|
||||
|
||||
@dataclass
class AbiturDokument:
    """Internal document record held in the in-memory store.

    Mirrors DokumentResponse field-for-field; kept as a mutable dataclass so
    the update/confirm/index endpoints can modify records in place.
    """
    id: str
    dateiname: str  # sanitized storage filename (uuid + extension)
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# In-Memory Storage
|
||||
# ============================================================================
|
||||
|
||||
# In-memory document registry keyed by document id (uuid4 string).
# NOTE(review): not persisted — restarting the service loses all metadata.
_dokumente: Dict[str, AbiturDokument] = {}

# Backwards-compatibility alias (older tests import `documents_db`)
documents_db = _dokumente
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Helper Functions - Dokumentenerkennung
|
||||
# Private helper (kept local since it references module-level _dokumente)
|
||||
# ============================================================================
|
||||
|
||||
def parse_nibis_filename(filename: str) -> RecognitionResult:
    """
    Extract Abitur metadata from a NiBiS-style filename.

    Examples:
        - 2025_Deutsch_eA_I.pdf
        - 2025_Deutsch_eA_I_EWH.pdf
        - 2025_Biologie_gA_1.pdf
        - 2025_Englisch_eA_HV.pdf (listening comprehension)

    Confidence is accumulated per recognized component and capped at 1.0;
    recognition counts as successful once both subject and year were found.
    """
    result = RecognitionResult(
        success=False,
        bundesland=Bundesland.NIEDERSACHSEN,  # NiBiS is the Niedersachsen server
        fach=None,
        jahr=None,
        niveau=None,
        typ=None,
        aufgaben_nummer=None,
        confidence=0.0,
        raw_filename=filename,
        suggestions=[]
    )

    # Normalize: drop the extension, lowercase everything.
    name = Path(filename).stem.lower()

    # Year: four digits at the start of the name.
    jahr_match = re.match(r'^(\d{4})', name)
    if jahr_match:
        result.jahr = int(jahr_match.group(1))
        result.confidence += 0.2

    # Subject: substring match against the alias table. The separator
    # stripping is loop-invariant, so compute it once (was re-done per alias).
    compact = name.replace("_", "").replace("-", "")
    for fach_key, fach_enum in FACH_NAME_MAPPING.items():
        if fach_key in compact:
            result.fach = fach_enum
            result.confidence += 0.3
            break

    # Level (eA = advanced, gA = basic). Match only as a complete
    # underscore-separated token: the previous substring checks were partly
    # redundant ("_ea_" is implied by "_ea") and fired on words that merely
    # contain the letters (e.g. "korea_" matched "ea_").
    if re.search(r'(?:^|_)ea(?:_|$)', name):
        result.niveau = Niveau.EA
        result.confidence += 0.2
    elif re.search(r'(?:^|_)ga(?:_|$)', name):
        result.niveau = Niveau.GA
        result.confidence += 0.2

    # Document type markers; first match wins, default is AUFGABE.
    if "_ewh" in name:
        result.typ = DokumentTyp.ERWARTUNGSHORIZONT
        result.confidence += 0.2
    elif "_hv" in name or "hoerverstehen" in name:
        result.typ = DokumentTyp.HOERVERSTEHEN
        result.confidence += 0.15
    elif "_sm" in name or "_me" in name or "sprachmittlung" in name:
        result.typ = DokumentTyp.SPRACHMITTLUNG
        result.confidence += 0.15
    elif "deckblatt" in name:
        result.typ = DokumentTyp.DECKBLATT
        result.confidence += 0.15
    elif "material" in name:
        result.typ = DokumentTyp.MATERIAL
        result.confidence += 0.15
    elif "bewertung" in name:
        result.typ = DokumentTyp.BEWERTUNGSBOGEN
        result.confidence += 0.15
    else:
        result.typ = DokumentTyp.AUFGABE
        result.confidence += 0.1

    # Task number: roman numerals or 1-4 with optional a/b/c suffix.
    # (The old "\.pdf" alternative was dead code: Path.stem never contains
    # the extension.)
    aufgabe_match = re.search(r'_([ivx]+|[1-4][abc]?)(?:_|$)', name, re.IGNORECASE)
    if aufgabe_match:
        result.aufgaben_nummer = aufgabe_match.group(1).upper()
        result.confidence += 0.1

    # Successful recognition requires at least subject and year.
    if result.fach and result.jahr:
        result.success = True

    # Normalize confidence to at most 1.0.
    result.confidence = min(result.confidence, 1.0)

    return result
|
||||
|
||||
|
||||
def _to_dokument_response(doc: AbiturDokument) -> DokumentResponse:
    """Convert an internal AbiturDokument record to the public response model.

    Pure field-for-field copy; no mutation of *doc*.

    Bug fix: removed an unreachable second ``return to_dokument_response(doc)``
    that a merge had left after the first return statement.
    """
    return DokumentResponse(
        id=doc.id,
        dateiname=doc.dateiname,
        original_dateiname=doc.original_dateiname,
        bundesland=doc.bundesland,
        fach=doc.fach,
        jahr=doc.jahr,
        niveau=doc.niveau,
        typ=doc.typ,
        aufgaben_nummer=doc.aufgaben_nummer,
        status=doc.status,
        confidence=doc.confidence,
        file_path=doc.file_path,
        file_size=doc.file_size,
        indexed=doc.indexed,
        vector_ids=doc.vector_ids,
        created_at=doc.created_at,
        updated_at=doc.updated_at
    )
|
||||
|
||||
|
||||
# ============================================================================
|
||||
@@ -413,18 +74,12 @@ async def upload_dokument(
|
||||
typ: Optional[DokumentTyp] = Form(None),
|
||||
aufgaben_nummer: Optional[str] = Form(None)
|
||||
):
|
||||
"""
|
||||
Lädt ein einzelnes Dokument hoch.
|
||||
|
||||
Metadaten können manuell angegeben oder automatisch erkannt werden.
|
||||
"""
|
||||
"""Lädt ein einzelnes Dokument hoch."""
|
||||
if not file.filename:
|
||||
raise HTTPException(status_code=400, detail="Kein Dateiname")
|
||||
|
||||
# Erkenne Metadaten aus Dateiname
|
||||
recognition = parse_nibis_filename(file.filename)
|
||||
|
||||
# Überschreibe mit manuellen Angaben
|
||||
final_bundesland = bundesland or recognition.bundesland or Bundesland.NIEDERSACHSEN
|
||||
final_fach = fach or recognition.fach
|
||||
final_jahr = jahr or recognition.jahr or datetime.now().year
|
||||
@@ -435,7 +90,6 @@ async def upload_dokument(
|
||||
if not final_fach:
|
||||
raise HTTPException(status_code=400, detail="Fach konnte nicht erkannt werden")
|
||||
|
||||
# Generiere ID und speichere Datei
|
||||
doc_id = str(uuid.uuid4())
|
||||
file_ext = Path(file.filename).suffix
|
||||
safe_filename = f"{doc_id}{file_ext}"
|
||||
@@ -446,30 +100,16 @@ async def upload_dokument(
|
||||
f.write(content)
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
dokument = AbiturDokument(
|
||||
id=doc_id,
|
||||
dateiname=safe_filename,
|
||||
original_dateiname=file.filename,
|
||||
bundesland=final_bundesland,
|
||||
fach=final_fach,
|
||||
jahr=final_jahr,
|
||||
niveau=final_niveau,
|
||||
typ=final_typ,
|
||||
aufgaben_nummer=final_aufgabe,
|
||||
id=doc_id, dateiname=safe_filename, original_dateiname=file.filename,
|
||||
bundesland=final_bundesland, fach=final_fach, jahr=final_jahr,
|
||||
niveau=final_niveau, typ=final_typ, aufgaben_nummer=final_aufgabe,
|
||||
status=VerarbeitungsStatus.RECOGNIZED if recognition.success else VerarbeitungsStatus.PENDING,
|
||||
confidence=recognition.confidence,
|
||||
file_path=str(file_path),
|
||||
file_size=len(content),
|
||||
indexed=False,
|
||||
vector_ids=[],
|
||||
created_at=now,
|
||||
updated_at=now
|
||||
confidence=recognition.confidence, file_path=str(file_path), file_size=len(content),
|
||||
indexed=False, vector_ids=[], created_at=now, updated_at=now
|
||||
)
|
||||
|
||||
_dokumente[doc_id] = dokument
|
||||
logger.info(f"Uploaded document {doc_id}: {file.filename}")
|
||||
|
||||
return _to_dokument_response(dokument)
|
||||
|
||||
|
||||
@@ -479,15 +119,10 @@ async def import_zip(
|
||||
bundesland: Bundesland = Form(Bundesland.NIEDERSACHSEN),
|
||||
background_tasks: BackgroundTasks = None
|
||||
):
|
||||
"""
|
||||
Importiert alle PDFs aus einer ZIP-Datei.
|
||||
|
||||
Erkennt automatisch Metadaten aus Dateinamen.
|
||||
"""
|
||||
"""Importiert alle PDFs aus einer ZIP-Datei."""
|
||||
if not file.filename or not file.filename.endswith(".zip"):
|
||||
raise HTTPException(status_code=400, detail="ZIP-Datei erforderlich")
|
||||
|
||||
# Speichere ZIP temporär
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp:
|
||||
content = await file.read()
|
||||
tmp.write(content)
|
||||
@@ -501,31 +136,22 @@ async def import_zip(
|
||||
try:
|
||||
with zipfile.ZipFile(tmp_path, 'r') as zip_ref:
|
||||
for zip_info in zip_ref.infolist():
|
||||
# Nur PDFs
|
||||
if not zip_info.filename.lower().endswith(".pdf"):
|
||||
continue
|
||||
|
||||
# Ignoriere Mac-spezifische Dateien
|
||||
if "__MACOSX" in zip_info.filename or zip_info.filename.startswith("."):
|
||||
continue
|
||||
|
||||
# Ignoriere Thumbs.db
|
||||
if "thumbs.db" in zip_info.filename.lower():
|
||||
continue
|
||||
|
||||
total += 1
|
||||
|
||||
try:
|
||||
# Erkenne Metadaten
|
||||
basename = Path(zip_info.filename).name
|
||||
recognition = parse_nibis_filename(basename)
|
||||
|
||||
if not recognition.fach:
|
||||
errors += 1
|
||||
logger.warning(f"Konnte Fach nicht erkennen: {basename}")
|
||||
continue
|
||||
|
||||
# Extrahiere und speichere
|
||||
doc_id = str(uuid.uuid4())
|
||||
file_ext = Path(basename).suffix
|
||||
safe_filename = f"{doc_id}{file_ext}"
|
||||
@@ -537,62 +163,39 @@ async def import_zip(
|
||||
target.write(file_content)
|
||||
|
||||
now = datetime.utcnow()
|
||||
|
||||
dokument = AbiturDokument(
|
||||
id=doc_id,
|
||||
dateiname=safe_filename,
|
||||
original_dateiname=basename,
|
||||
bundesland=bundesland,
|
||||
fach=recognition.fach,
|
||||
id=doc_id, dateiname=safe_filename, original_dateiname=basename,
|
||||
bundesland=bundesland, fach=recognition.fach,
|
||||
jahr=recognition.jahr or datetime.now().year,
|
||||
niveau=recognition.niveau or Niveau.EA,
|
||||
typ=recognition.typ or DokumentTyp.AUFGABE,
|
||||
aufgaben_nummer=recognition.aufgaben_nummer,
|
||||
status=VerarbeitungsStatus.RECOGNIZED,
|
||||
confidence=recognition.confidence,
|
||||
file_path=str(file_path),
|
||||
file_size=len(file_content),
|
||||
indexed=False,
|
||||
vector_ids=[],
|
||||
created_at=now,
|
||||
updated_at=now
|
||||
status=VerarbeitungsStatus.RECOGNIZED, confidence=recognition.confidence,
|
||||
file_path=str(file_path), file_size=len(file_content),
|
||||
indexed=False, vector_ids=[], created_at=now, updated_at=now
|
||||
)
|
||||
|
||||
_dokumente[doc_id] = dokument
|
||||
documents.append(_to_dokument_response(dokument))
|
||||
recognized += 1
|
||||
|
||||
except Exception as e:
|
||||
errors += 1
|
||||
logger.error(f"Fehler bei {zip_info.filename}: {e}")
|
||||
|
||||
finally:
|
||||
# Lösche temporäre ZIP
|
||||
os.unlink(tmp_path)
|
||||
|
||||
logger.info(f"ZIP-Import: {recognized}/{total} erkannt, {errors} Fehler")
|
||||
|
||||
return ImportResult(
|
||||
total_files=total,
|
||||
recognized=recognized,
|
||||
errors=errors,
|
||||
documents=documents
|
||||
)
|
||||
return ImportResult(total_files=total, recognized=recognized, errors=errors, documents=documents)
|
||||
|
||||
|
||||
@router.get("/", response_model=List[DokumentResponse])
|
||||
async def list_dokumente(
|
||||
bundesland: Optional[Bundesland] = None,
|
||||
fach: Optional[Fach] = None,
|
||||
jahr: Optional[int] = None,
|
||||
niveau: Optional[Niveau] = None,
|
||||
typ: Optional[DokumentTyp] = None,
|
||||
status: Optional[VerarbeitungsStatus] = None,
|
||||
bundesland: Optional[Bundesland] = None, fach: Optional[Fach] = None,
|
||||
jahr: Optional[int] = None, niveau: Optional[Niveau] = None,
|
||||
typ: Optional[DokumentTyp] = None, status: Optional[VerarbeitungsStatus] = None,
|
||||
indexed: Optional[bool] = None
|
||||
):
|
||||
"""Listet Dokumente mit optionalen Filtern."""
|
||||
docs = list(_dokumente.values())
|
||||
|
||||
if bundesland:
|
||||
docs = [d for d in docs if d.bundesland == bundesland]
|
||||
if fach:
|
||||
@@ -607,7 +210,6 @@ async def list_dokumente(
|
||||
docs = [d for d in docs if d.status == status]
|
||||
if indexed is not None:
|
||||
docs = [d for d in docs if d.indexed == indexed]
|
||||
|
||||
docs.sort(key=lambda x: (x.jahr, x.fach.value, x.niveau.value), reverse=True)
|
||||
return [_to_dokument_response(d) for d in docs]
|
||||
|
||||
@@ -623,11 +225,10 @@ async def get_dokument(doc_id: str):
|
||||
|
||||
@router.put("/{doc_id}", response_model=DokumentResponse)
|
||||
async def update_dokument(doc_id: str, data: DokumentUpdate):
|
||||
"""Aktualisiert Dokument-Metadaten (nach KI-Erkennung durch Entwickler)."""
|
||||
"""Aktualisiert Dokument-Metadaten."""
|
||||
doc = _dokumente.get(doc_id)
|
||||
if not doc:
|
||||
raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
|
||||
|
||||
if data.bundesland is not None:
|
||||
doc.bundesland = data.bundesland
|
||||
if data.fach is not None:
|
||||
@@ -642,9 +243,7 @@ async def update_dokument(doc_id: str, data: DokumentUpdate):
|
||||
doc.aufgaben_nummer = data.aufgaben_nummer
|
||||
if data.status is not None:
|
||||
doc.status = data.status
|
||||
|
||||
doc.updated_at = datetime.utcnow()
|
||||
|
||||
return _to_dokument_response(doc)
|
||||
|
||||
|
||||
@@ -654,10 +253,8 @@ async def confirm_dokument(doc_id: str):
|
||||
doc = _dokumente.get(doc_id)
|
||||
if not doc:
|
||||
raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
|
||||
|
||||
doc.status = VerarbeitungsStatus.CONFIRMED
|
||||
doc.updated_at = datetime.utcnow()
|
||||
|
||||
return _to_dokument_response(doc)
|
||||
|
||||
|
||||
@@ -667,24 +264,13 @@ async def index_dokument(doc_id: str):
|
||||
doc = _dokumente.get(doc_id)
|
||||
if not doc:
|
||||
raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
|
||||
|
||||
if doc.status not in [VerarbeitungsStatus.CONFIRMED, VerarbeitungsStatus.RECOGNIZED]:
|
||||
raise HTTPException(status_code=400, detail="Dokument muss erst bestätigt werden")
|
||||
|
||||
# TODO: Vector Store Integration
|
||||
# 1. PDF lesen und Text extrahieren
|
||||
# 2. In Chunks aufteilen
|
||||
# 3. Embeddings generieren
|
||||
# 4. Mit Metadaten im Vector Store speichern
|
||||
|
||||
# Demo: Simuliere Indexierung
|
||||
doc.indexed = True
|
||||
doc.vector_ids = [f"vec_{doc_id}_{i}" for i in range(3)] # Demo-IDs
|
||||
doc.vector_ids = [f"vec_{doc_id}_{i}" for i in range(3)]
|
||||
doc.status = VerarbeitungsStatus.INDEXED
|
||||
doc.updated_at = datetime.utcnow()
|
||||
|
||||
logger.info(f"Document {doc_id} indexed (demo)")
|
||||
|
||||
return _to_dokument_response(doc)
|
||||
|
||||
|
||||
@@ -694,15 +280,9 @@ async def delete_dokument(doc_id: str):
|
||||
doc = _dokumente.get(doc_id)
|
||||
if not doc:
|
||||
raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
|
||||
|
||||
# Lösche Datei
|
||||
if os.path.exists(doc.file_path):
|
||||
os.remove(doc.file_path)
|
||||
|
||||
# TODO: Aus Vector Store entfernen
|
||||
|
||||
del _dokumente[doc_id]
|
||||
|
||||
return {"status": "deleted", "id": doc_id}
|
||||
|
||||
|
||||
@@ -712,20 +292,10 @@ async def download_dokument(doc_id: str):
|
||||
doc = _dokumente.get(doc_id)
|
||||
if not doc:
|
||||
raise HTTPException(status_code=404, detail="Dokument nicht gefunden")
|
||||
|
||||
if not os.path.exists(doc.file_path):
|
||||
raise HTTPException(status_code=404, detail="Datei nicht gefunden")
|
||||
return FileResponse(doc.file_path, filename=doc.original_dateiname, media_type="application/pdf")
|
||||
|
||||
return FileResponse(
|
||||
doc.file_path,
|
||||
filename=doc.original_dateiname,
|
||||
media_type="application/pdf"
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# API Endpoints - Erkennung
|
||||
# ============================================================================
|
||||
|
||||
@router.post("/recognize", response_model=RecognitionResult)
|
||||
async def recognize_filename(filename: str):
|
||||
@@ -743,7 +313,6 @@ async def bulk_confirm(doc_ids: List[str]):
|
||||
doc.status = VerarbeitungsStatus.CONFIRMED
|
||||
doc.updated_at = datetime.utcnow()
|
||||
confirmed += 1
|
||||
|
||||
return {"confirmed": confirmed, "total": len(doc_ids)}
|
||||
|
||||
|
||||
@@ -754,70 +323,41 @@ async def bulk_index(doc_ids: List[str]):
|
||||
for doc_id in doc_ids:
|
||||
doc = _dokumente.get(doc_id)
|
||||
if doc and doc.status in [VerarbeitungsStatus.CONFIRMED, VerarbeitungsStatus.RECOGNIZED]:
|
||||
# Demo-Indexierung
|
||||
doc.indexed = True
|
||||
doc.vector_ids = [f"vec_{doc_id}_{i}" for i in range(3)]
|
||||
doc.status = VerarbeitungsStatus.INDEXED
|
||||
doc.updated_at = datetime.utcnow()
|
||||
indexed += 1
|
||||
|
||||
return {"indexed": indexed, "total": len(doc_ids)}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# API Endpoints - Statistiken
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/stats/overview")
|
||||
async def get_stats_overview():
|
||||
"""Gibt Übersicht über alle Dokumente."""
|
||||
docs = list(_dokumente.values())
|
||||
|
||||
by_bundesland = {}
|
||||
by_fach = {}
|
||||
by_jahr = {}
|
||||
by_status = {}
|
||||
|
||||
by_bundesland: Dict[str, int] = {}
|
||||
by_fach: Dict[str, int] = {}
|
||||
by_jahr: Dict[int, int] = {}
|
||||
by_status: Dict[str, int] = {}
|
||||
for doc in docs:
|
||||
by_bundesland[doc.bundesland.value] = by_bundesland.get(doc.bundesland.value, 0) + 1
|
||||
by_fach[doc.fach.value] = by_fach.get(doc.fach.value, 0) + 1
|
||||
by_jahr[doc.jahr] = by_jahr.get(doc.jahr, 0) + 1
|
||||
by_status[doc.status.value] = by_status.get(doc.status.value, 0) + 1
|
||||
|
||||
return {
|
||||
"total": len(docs),
|
||||
"indexed": sum(1 for d in docs if d.indexed),
|
||||
"total": len(docs), "indexed": sum(1 for d in docs if d.indexed),
|
||||
"pending": sum(1 for d in docs if d.status == VerarbeitungsStatus.PENDING),
|
||||
"by_bundesland": by_bundesland,
|
||||
"by_fach": by_fach,
|
||||
"by_jahr": by_jahr,
|
||||
"by_status": by_status
|
||||
"by_bundesland": by_bundesland, "by_fach": by_fach, "by_jahr": by_jahr, "by_status": by_status
|
||||
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# API Endpoints - Suche (für Klausur-Korrektur)
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/search", response_model=List[DokumentResponse])
|
||||
async def search_dokumente(
|
||||
bundesland: Bundesland,
|
||||
fach: Fach,
|
||||
jahr: Optional[int] = None,
|
||||
niveau: Optional[Niveau] = None,
|
||||
nur_indexed: bool = True
|
||||
bundesland: Bundesland, fach: Fach, jahr: Optional[int] = None,
|
||||
niveau: Optional[Niveau] = None, nur_indexed: bool = True
|
||||
):
|
||||
"""
|
||||
Sucht Dokumente für Klausur-Korrektur.
|
||||
|
||||
Gibt nur indizierte Dokumente zurück (Standard).
|
||||
"""
|
||||
docs = list(_dokumente.values())
|
||||
|
||||
# Pflichtfilter
|
||||
docs = [d for d in docs if d.bundesland == bundesland and d.fach == fach]
|
||||
|
||||
# Optionale Filter
|
||||
"""Sucht Dokumente für Klausur-Korrektur."""
|
||||
docs = [d for d in _dokumente.values() if d.bundesland == bundesland and d.fach == fach]
|
||||
if jahr:
|
||||
docs = [d for d in docs if d.jahr == jahr]
|
||||
if niveau:
|
||||
@@ -825,7 +365,6 @@ async def search_dokumente(
|
||||
if nur_indexed:
|
||||
docs = [d for d in docs if d.indexed]
|
||||
|
||||
# Sortiere: Aufgaben vor Erwartungshorizonten
|
||||
aufgaben = [d for d in docs if d.typ == DokumentTyp.AUFGABE]
|
||||
ewh = [d for d in docs if d.typ == DokumentTyp.ERWARTUNGSHORIZONT]
|
||||
andere = [d for d in docs if d.typ not in [DokumentTyp.AUFGABE, DokumentTyp.ERWARTUNGSHORIZONT]]
|
||||
@@ -833,31 +372,20 @@ async def search_dokumente(
|
||||
result = []
|
||||
for aufgabe in aufgaben:
|
||||
result.append(_to_dokument_response(aufgabe))
|
||||
# Finde passenden EWH
|
||||
matching_ewh = next(
|
||||
(e for e in ewh
|
||||
if e.jahr == aufgabe.jahr
|
||||
and e.niveau == aufgabe.niveau
|
||||
and e.aufgaben_nummer == aufgabe.aufgaben_nummer),
|
||||
None
|
||||
(e for e in ewh if e.jahr == aufgabe.jahr and e.niveau == aufgabe.niveau
|
||||
and e.aufgaben_nummer == aufgabe.aufgaben_nummer), None
|
||||
)
|
||||
if matching_ewh:
|
||||
result.append(_to_dokument_response(matching_ewh))
|
||||
|
||||
# Restliche EWH und andere
|
||||
for e in ewh:
|
||||
if _to_dokument_response(e) not in result:
|
||||
result.append(_to_dokument_response(e))
|
||||
for a in andere:
|
||||
result.append(_to_dokument_response(a))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enums Endpoint (für Frontend)
|
||||
# ============================================================================
|
||||
|
||||
@router.get("/enums/bundeslaender")
|
||||
async def get_bundeslaender():
|
||||
"""Gibt alle Bundesländer zurück."""
|
||||
@@ -867,35 +395,7 @@ async def get_bundeslaender():
|
||||
@router.get("/enums/faecher")
async def get_faecher():
    """Return all subjects as value/label pairs for frontend dropdowns.

    Bug fix: a merge had left two return statements — a local ``labels``
    dict with its return, followed by an unreachable second return using
    the shared ``FACH_LABELS`` table. Consolidated to the single
    ``FACH_LABELS``-based return (imported from abitur_docs_models);
    falls back to the raw enum value for any subject without a label.
    """
    return [{"value": f.value, "label": FACH_LABELS.get(f, f.value)} for f in Fach]
|
||||
|
||||
|
||||
@router.get("/enums/niveaus")
|
||||
@@ -910,47 +410,4 @@ async def get_niveaus():
|
||||
@router.get("/enums/typen")
async def get_typen():
    """Return all document types as value/label pairs for frontend dropdowns.

    Consistency fix: the local ``labels`` dict duplicated the
    ``DOKUMENT_TYP_LABELS`` table the module already imports from
    abitur_docs_models; use the shared table (mirroring get_faecher) and
    fall back to the raw enum value when a label is missing.
    """
    return [{"value": t.value, "label": DOKUMENT_TYP_LABELS.get(t, t.value)} for t in DokumentTyp]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Backwards-compatibility aliases (used by tests)
|
||||
# ============================================================================
|
||||
AbiturFach = Fach
|
||||
Anforderungsniveau = Niveau
|
||||
documents_db = _dokumente
|
||||
|
||||
|
||||
class DocumentMetadata(BaseModel):
|
||||
"""Backwards-compatible metadata model for tests."""
|
||||
jahr: Optional[int] = None
|
||||
bundesland: Optional[str] = None
|
||||
fach: Optional[str] = None
|
||||
niveau: Optional[str] = None
|
||||
dokument_typ: Optional[str] = None
|
||||
aufgaben_nummer: Optional[str] = None
|
||||
|
||||
|
||||
# Backwards-compatible AbiturDokument for tests (different from internal dataclass)
|
||||
class AbiturDokumentCompat(BaseModel):
|
||||
"""Backwards-compatible AbiturDokument model for tests."""
|
||||
id: str
|
||||
filename: str
|
||||
file_path: str
|
||||
metadata: DocumentMetadata
|
||||
status: VerarbeitungsStatus
|
||||
recognition_result: Optional[RecognitionResult] = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
return [{"value": t.value, "label": DOKUMENT_TYP_LABELS.get(t, t.value)} for t in DokumentTyp]
|
||||
|
||||
327
backend-lehrer/abitur_docs_models.py
Normal file
327
backend-lehrer/abitur_docs_models.py
Normal file
@@ -0,0 +1,327 @@
|
||||
"""
|
||||
Abitur Document Store - Enums, Pydantic Models, Data Classes.
|
||||
|
||||
Shared types for abitur_docs_api and abitur_docs_recognition.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Optional
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enums
|
||||
# ============================================================================
|
||||
|
||||
class Bundesland(str, Enum):
    """German federal states with a centralized Abitur (Zentralabitur).

    Values are the lowercase snake_case identifiers used in filenames,
    API payloads and the vector index.
    """
    NIEDERSACHSEN = "niedersachsen"
    BAYERN = "bayern"
    BADEN_WUERTTEMBERG = "baden_wuerttemberg"
    NORDRHEIN_WESTFALEN = "nordrhein_westfalen"
    HESSEN = "hessen"
    SACHSEN = "sachsen"
    THUERINGEN = "thueringen"
    BERLIN = "berlin"
    HAMBURG = "hamburg"
    SCHLESWIG_HOLSTEIN = "schleswig_holstein"
    BREMEN = "bremen"
    BRANDENBURG = "brandenburg"
    MECKLENBURG_VORPOMMERN = "mecklenburg_vorpommern"
    SACHSEN_ANHALT = "sachsen_anhalt"
    RHEINLAND_PFALZ = "rheinland_pfalz"
    SAARLAND = "saarland"
||||
|
||||
|
||||
class Fach(str, Enum):
    """Abitur exam subjects.

    Display labels live in FACH_LABELS; filename aliases in FACH_NAME_MAPPING.
    """
    DEUTSCH = "deutsch"
    ENGLISCH = "englisch"
    MATHEMATIK = "mathematik"
    BIOLOGIE = "biologie"
    CHEMIE = "chemie"
    PHYSIK = "physik"
    GESCHICHTE = "geschichte"
    ERDKUNDE = "erdkunde"
    POLITIK_WIRTSCHAFT = "politik_wirtschaft"
    FRANZOESISCH = "franzoesisch"
    SPANISCH = "spanisch"
    LATEIN = "latein"
    GRIECHISCH = "griechisch"
    KUNST = "kunst"
    MUSIK = "musik"
    SPORT = "sport"
    INFORMATIK = "informatik"
    EV_RELIGION = "ev_religion"
    KATH_RELIGION = "kath_religion"
    WERTE_NORMEN = "werte_normen"
    BRC = "brc"
    BVW = "bvw"
    ERNAEHRUNG = "ernaehrung"
    MECHATRONIK = "mechatronik"
    GESUNDHEIT_PFLEGE = "gesundheit_pflege"
    PAEDAGOGIK_PSYCHOLOGIE = "paedagogik_psychologie"
||||
|
||||
|
||||
class Niveau(str, Enum):
    """Exam difficulty level (Anforderungsniveau): eA = erhöht, gA = grundlegend."""
    EA = "eA"
    GA = "gA"
||||
|
||||
|
||||
class DokumentTyp(str, Enum):
    """Kind of Abitur document (task sheet, grading guide, cover sheet, ...)."""
    AUFGABE = "aufgabe"                          # exam task
    ERWARTUNGSHORIZONT = "erwartungshorizont"    # expected-answer / grading guide
    DECKBLATT = "deckblatt"                      # cover sheet
    MATERIAL = "material"                        # supplementary material
    HOERVERSTEHEN = "hoerverstehen"              # listening comprehension
    SPRACHMITTLUNG = "sprachmittlung"            # language mediation
    BEWERTUNGSBOGEN = "bewertungsbogen"          # assessment sheet
||||
|
||||
|
||||
class VerarbeitungsStatus(str, Enum):
    """Processing status of a document.

    NOTE(review): listed in apparent pipeline order (pending -> processing ->
    recognized -> confirmed -> indexed), with ERROR as the failure state —
    confirm the exact transitions against the API handlers.
    """
    PENDING = "pending"
    PROCESSING = "processing"
    RECOGNIZED = "recognized"
    CONFIRMED = "confirmed"
    INDEXED = "indexed"
    ERROR = "error"
||||
|
||||
|
||||
# ============================================================================
|
||||
# Fach-Mapping für Dateinamen
|
||||
# ============================================================================
|
||||
|
||||
# Maps lowercase, separator-free filename fragments to subjects.
# parse_nibis_filename matches these keys as substrings of the filename
# with "_" and "-" removed, so each key must be unambiguous in that form.
FACH_NAME_MAPPING = {
    "deutsch": Fach.DEUTSCH,
    "englisch": Fach.ENGLISCH,
    "mathe": Fach.MATHEMATIK,
    "mathematik": Fach.MATHEMATIK,
    "biologie": Fach.BIOLOGIE,
    "bio": Fach.BIOLOGIE,
    "chemie": Fach.CHEMIE,
    "physik": Fach.PHYSIK,
    "geschichte": Fach.GESCHICHTE,
    "erdkunde": Fach.ERDKUNDE,
    "geographie": Fach.ERDKUNDE,
    "politikwirtschaft": Fach.POLITIK_WIRTSCHAFT,
    "politik": Fach.POLITIK_WIRTSCHAFT,
    "franzoesisch": Fach.FRANZOESISCH,
    "franz": Fach.FRANZOESISCH,
    "spanisch": Fach.SPANISCH,
    "latein": Fach.LATEIN,
    "griechisch": Fach.GRIECHISCH,
    "kunst": Fach.KUNST,
    "musik": Fach.MUSIK,
    "sport": Fach.SPORT,
    "informatik": Fach.INFORMATIK,
    "evreligion": Fach.EV_RELIGION,
    "kathreligion": Fach.KATH_RELIGION,
    "wertenormen": Fach.WERTE_NORMEN,
    "brc": Fach.BRC,
    "bvw": Fach.BVW,
    "ernaehrung": Fach.ERNAEHRUNG,
    "mecha": Fach.MECHATRONIK,
    "mechatronik": Fach.MECHATRONIK,
    "technikmecha": Fach.MECHATRONIK,
    "gespfl": Fach.GESUNDHEIT_PFLEGE,
    "paedpsych": Fach.PAEDAGOGIK_PSYCHOLOGIE,
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Pydantic Models
|
||||
# ============================================================================
|
||||
|
||||
class DokumentCreate(BaseModel):
    """Request body for manually creating a document with known metadata."""
    bundesland: Bundesland
    fach: Fach
    jahr: int = Field(ge=2000, le=2100)  # exam year, sanity-bounded
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str] = None  # task number, e.g. "I" or "2a"
||||
|
||||
|
||||
class DokumentUpdate(BaseModel):
    """Partial update for recognized metadata; None means "leave unchanged"."""
    bundesland: Optional[Bundesland] = None
    fach: Optional[Fach] = None
    jahr: Optional[int] = None
    niveau: Optional[Niveau] = None
    typ: Optional[DokumentTyp] = None
    aufgaben_nummer: Optional[str] = None
    status: Optional[VerarbeitungsStatus] = None
||||
|
||||
|
||||
class DokumentResponse(BaseModel):
    """API response for one document; mirrors the internal AbiturDokument."""
    id: str
    dateiname: str            # normalized filename
    original_dateiname: str   # filename as uploaded
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float         # recognition confidence, 0.0..1.0
    file_path: str
    file_size: int
    indexed: bool             # whether the document is in the vector index
    vector_ids: List[str]     # ids of the index entries for this document
    created_at: datetime
    updated_at: datetime
||||
|
||||
|
||||
class ImportResult(BaseModel):
    """Summary of a ZIP import: counts plus the created documents."""
    total_files: int
    recognized: int
    errors: int
    documents: List[DokumentResponse]
||||
|
||||
|
||||
class RecognitionResult(BaseModel):
    """Outcome of filename-based metadata recognition.

    ``success`` is True when at least subject and year were recognized;
    ``confidence`` is an accumulated score in [0, 1].
    """
    success: bool
    bundesland: Optional[Bundesland]
    fach: Optional[Fach]
    jahr: Optional[int]
    niveau: Optional[Niveau]
    typ: Optional[DokumentTyp]
    aufgaben_nummer: Optional[str]
    confidence: float
    raw_filename: str
    suggestions: List[Dict[str, Any]]

    @property
    def extracted(self) -> Dict[str, Any]:
        """Backwards-compatible dict of the truthy recognized fields.

        Enum members are flattened to their string values; the historical
        key order (bundesland, fach, jahr, niveau, typ, aufgaben_nummer)
        is preserved.
        """
        extracted_values: Dict[str, Any] = {}
        for field_name in ("bundesland", "fach", "jahr", "niveau", "typ", "aufgaben_nummer"):
            value = getattr(self, field_name)
            if value:
                extracted_values[field_name] = value.value if isinstance(value, Enum) else value
        return extracted_values

    @property
    def method(self) -> str:
        """Backwards-compatible name of the recognition method."""
        return "filename_pattern"
||||
|
||||
|
||||
# ============================================================================
|
||||
# Internal Data Classes
|
||||
# ============================================================================
|
||||
|
||||
@dataclass
class AbiturDokument:
    """Internal storage record for a document (not serialized directly).

    Converted to the API shape via to_dokument_response.
    """
    id: str
    dateiname: str
    original_dateiname: str
    bundesland: Bundesland
    fach: Fach
    jahr: int
    niveau: Niveau
    typ: DokumentTyp
    aufgaben_nummer: Optional[str]
    status: VerarbeitungsStatus
    confidence: float
    file_path: str
    file_size: int
    indexed: bool
    vector_ids: List[str]
    created_at: datetime
    updated_at: datetime
||||
|
||||
|
||||
# ============================================================================
|
||||
# Backwards-compatibility aliases (used by tests)
|
||||
# ============================================================================
|
||||
# Old public names kept so existing imports (and the test suite) keep working.
AbiturFach = Fach
Anforderungsniveau = Niveau
||||
|
||||
|
||||
class DocumentMetadata(BaseModel):
    """Backwards-compatible metadata model for tests.

    All fields are optional plain strings/ints (not enums), matching the
    pre-refactor shape the test suite asserts against.
    """
    jahr: Optional[int] = None
    bundesland: Optional[str] = None
    fach: Optional[str] = None
    niveau: Optional[str] = None
    dokument_typ: Optional[str] = None
    aufgaben_nummer: Optional[str] = None
||||
|
||||
|
||||
# Backwards-compatible AbiturDokument for tests (different from the internal dataclass).
class AbiturDokumentCompat(BaseModel):
    """Backwards-compatible AbiturDokument model for tests."""
    id: str
    filename: str
    file_path: str
    metadata: DocumentMetadata
    status: VerarbeitungsStatus
    recognition_result: Optional[RecognitionResult] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        # Permit non-pydantic field types (pydantic v1-style config).
        arbitrary_types_allowed = True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Fach Labels (für Frontend Enum-Endpoint)
|
||||
# ============================================================================
|
||||
|
||||
# ============================================================================
# German display labels served by the /enums/* frontend endpoints.
# Enum members missing here fall back to their raw value at the call site.
# ============================================================================

FACH_LABELS = {
    Fach.DEUTSCH: "Deutsch",
    Fach.ENGLISCH: "Englisch",
    Fach.MATHEMATIK: "Mathematik",
    Fach.BIOLOGIE: "Biologie",
    Fach.CHEMIE: "Chemie",
    Fach.PHYSIK: "Physik",
    Fach.GESCHICHTE: "Geschichte",
    Fach.ERDKUNDE: "Erdkunde",
    Fach.POLITIK_WIRTSCHAFT: "Politik-Wirtschaft",
    Fach.FRANZOESISCH: "Französisch",
    Fach.SPANISCH: "Spanisch",
    Fach.LATEIN: "Latein",
    Fach.GRIECHISCH: "Griechisch",
    Fach.KUNST: "Kunst",
    Fach.MUSIK: "Musik",
    Fach.SPORT: "Sport",
    Fach.INFORMATIK: "Informatik",
    Fach.EV_RELIGION: "Ev. Religion",
    Fach.KATH_RELIGION: "Kath. Religion",
    Fach.WERTE_NORMEN: "Werte und Normen",
    Fach.BRC: "BRC (Betriebswirtschaft)",
    Fach.BVW: "BVW (Volkswirtschaft)",
    Fach.ERNAEHRUNG: "Ernährung",
    Fach.MECHATRONIK: "Mechatronik",
    Fach.GESUNDHEIT_PFLEGE: "Gesundheit-Pflege",
    Fach.PAEDAGOGIK_PSYCHOLOGIE: "Pädagogik-Psychologie",
}

DOKUMENT_TYP_LABELS = {
    DokumentTyp.AUFGABE: "Aufgabe",
    DokumentTyp.ERWARTUNGSHORIZONT: "Erwartungshorizont",
    DokumentTyp.DECKBLATT: "Deckblatt",
    DokumentTyp.MATERIAL: "Material",
    DokumentTyp.HOERVERSTEHEN: "Hörverstehen",
    DokumentTyp.SPRACHMITTLUNG: "Sprachmittlung",
    DokumentTyp.BEWERTUNGSBOGEN: "Bewertungsbogen",
}
||||
124
backend-lehrer/abitur_docs_recognition.py
Normal file
124
backend-lehrer/abitur_docs_recognition.py
Normal file
@@ -0,0 +1,124 @@
|
||||
"""
|
||||
Abitur Document Store - Dateinamen-Erkennung und Helfer.
|
||||
|
||||
Erkennt Metadaten aus NiBiS-Dateinamen (Niedersachsen).
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Dict, Any
|
||||
from pathlib import Path
|
||||
|
||||
from abitur_docs_models import (
|
||||
Bundesland, Fach, Niveau, DokumentTyp, VerarbeitungsStatus,
|
||||
RecognitionResult, AbiturDokument, DokumentResponse,
|
||||
FACH_NAME_MAPPING,
|
||||
)
|
||||
|
||||
|
||||
def parse_nibis_filename(filename: str) -> RecognitionResult:
    """Recognize Abitur metadata from a NiBiS (Niedersachsen) filename.

    Examples:
        - 2025_Deutsch_eA_I.pdf
        - 2025_Deutsch_eA_I_EWH.pdf
        - 2025_Biologie_gA_1.pdf
        - 2025_Englisch_eA_HV.pdf (listening comprehension)

    Confidence accumulates per recognized field and is capped at 1.0;
    ``success`` requires at least subject (Fach) and year.

    Fixes vs. the previous version (behavior-preserving):
    - dropped the redundant '"_ea_" in name' check (implied by '"_ea"'),
    - dropped re.IGNORECASE on the task-number search (name is lowercased),
    - hoisted the loop-invariant separator stripping out of the subject loop.
    """
    result = RecognitionResult(
        success=False,
        bundesland=Bundesland.NIEDERSACHSEN,  # NiBiS files are always Niedersachsen
        fach=None,
        jahr=None,
        niveau=None,
        typ=None,
        aufgaben_nummer=None,
        confidence=0.0,
        raw_filename=filename,
        suggestions=[]
    )

    # Normalize: strip directory and extension, lowercase.
    name = Path(filename).stem.lower()

    # Year: four digits at the start (e.g. "2025_...").
    jahr_match = re.match(r'^(\d{4})', name)
    if jahr_match:
        result.jahr = int(jahr_match.group(1))
        result.confidence += 0.2

    # Subject: match known aliases against the separator-free name.
    compact_name = name.replace("_", "").replace("-", "")
    for fach_key, fach_enum in FACH_NAME_MAPPING.items():
        if fach_key in compact_name:
            result.fach = fach_enum
            result.confidence += 0.3
            break

    # Level (eA = erhöht, gA = grundlegend).
    if "_ea" in name or "ea_" in name:
        result.niveau = Niveau.EA
        result.confidence += 0.2
    elif "_ga" in name or "ga_" in name:
        result.niveau = Niveau.GA
        result.confidence += 0.2

    # Document type markers; unmarked files default to AUFGABE (low confidence).
    if "_ewh" in name:
        result.typ = DokumentTyp.ERWARTUNGSHORIZONT
        result.confidence += 0.2
    elif "_hv" in name or "hoerverstehen" in name:
        result.typ = DokumentTyp.HOERVERSTEHEN
        result.confidence += 0.15
    elif "_sm" in name or "_me" in name or "sprachmittlung" in name:
        result.typ = DokumentTyp.SPRACHMITTLUNG
        result.confidence += 0.15
    elif "deckblatt" in name:
        result.typ = DokumentTyp.DECKBLATT
        result.confidence += 0.15
    elif "material" in name:
        result.typ = DokumentTyp.MATERIAL
        result.confidence += 0.15
    elif "bewertung" in name:
        result.typ = DokumentTyp.BEWERTUNGSBOGEN
        result.confidence += 0.15
    else:
        result.typ = DokumentTyp.AUFGABE
        result.confidence += 0.1

    # Task number: roman (I, II, ...) or arabic 1-4 with optional a/b/c.
    # The '\.pdf' alternative still matters for stems like "x.pdf.pdf" -> "x.pdf".
    aufgabe_match = re.search(r'_([ivx]+|[1-4][abc]?)(?:_|\.pdf|$)', name)
    if aufgabe_match:
        result.aufgaben_nummer = aufgabe_match.group(1).upper()
        result.confidence += 0.1

    # Successful recognition requires at least subject and year.
    if result.fach and result.jahr:
        result.success = True

    # Cap accumulated confidence at 1.0.
    result.confidence = min(result.confidence, 1.0)

    return result
|
||||
|
||||
|
||||
def to_dokument_response(doc: AbiturDokument) -> DokumentResponse:
    """Map the internal AbiturDokument record onto the API response model."""
    # Field names are identical on both sides, so copy them mechanically.
    field_names = (
        "id", "dateiname", "original_dateiname", "bundesland", "fach",
        "jahr", "niveau", "typ", "aufgaben_nummer", "status", "confidence",
        "file_path", "file_size", "indexed", "vector_ids",
        "created_at", "updated_at",
    )
    return DokumentResponse(**{name: getattr(doc, name) for name in field_names})
|
||||
394
backend-lehrer/alerts_agent/db/item_repository.py
Normal file
394
backend-lehrer/alerts_agent/db/item_repository.py
Normal file
@@ -0,0 +1,394 @@
|
||||
"""
|
||||
Repository für Alert Items (einzelne Alerts/Artikel).
|
||||
"""
|
||||
import hashlib
|
||||
import urllib.parse
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.orm import Session as DBSession
|
||||
from sqlalchemy import or_, func
|
||||
|
||||
from .models import (
|
||||
AlertItemDB, AlertSourceEnum, AlertStatusEnum, RelevanceDecisionEnum
|
||||
)
|
||||
|
||||
|
||||
class AlertItemRepository:
    """Repository for alert items (individual alerts/articles).

    Thin data-access layer over a SQLAlchemy session. Every mutating
    method commits immediately. Deduplication is based on a SHA256
    prefix of the normalized (tracking-parameter-stripped) URL.
    """

    def __init__(self, db: DBSession):
        # The session is owned by the caller; this class only borrows it.
        self.db = db

    # ==================== CREATE ====================

    def create(
        self,
        topic_id: str,
        title: str,
        url: str,
        snippet: str = "",
        source: str = "google_alerts_rss",
        published_at: Optional[datetime] = None,
        lang: str = "de",
    ) -> AlertItemDB:
        """Create and persist a new alert.

        Raises:
            ValueError: if *source* is not a valid AlertSourceEnum value.
        """
        url_hash = self._compute_url_hash(url)

        alert = AlertItemDB(
            id=str(uuid.uuid4()),
            topic_id=topic_id,
            title=title,
            url=url,
            snippet=snippet,
            source=AlertSourceEnum(source),
            published_at=published_at,
            lang=lang,
            url_hash=url_hash,
            canonical_url=self._normalize_url(url),
        )
        self.db.add(alert)
        self.db.commit()
        self.db.refresh(alert)
        return alert

    def create_if_not_exists(
        self,
        topic_id: str,
        title: str,
        url: str,
        snippet: str = "",
        source: str = "google_alerts_rss",
        published_at: Optional[datetime] = None,
    ) -> Optional[AlertItemDB]:
        """Create an alert only if its URL hash is unseen; None on duplicate.

        NOTE(review): check-then-insert is not race-safe under concurrent
        writers — a unique constraint on url_hash would be; confirm schema.
        """
        url_hash = self._compute_url_hash(url)

        existing = self.db.query(AlertItemDB).filter(
            AlertItemDB.url_hash == url_hash
        ).first()

        if existing:
            return None  # duplicate

        return self.create(
            topic_id=topic_id,
            title=title,
            url=url,
            snippet=snippet,
            source=source,
            published_at=published_at,
        )

    # ==================== READ ====================

    def get_by_id(self, alert_id: str) -> Optional[AlertItemDB]:
        """Fetch one alert by primary key, or None."""
        return self.db.query(AlertItemDB).filter(
            AlertItemDB.id == alert_id
        ).first()

    def get_by_url_hash(self, url_hash: str) -> Optional[AlertItemDB]:
        """Fetch one alert by its normalized-URL hash, or None."""
        return self.db.query(AlertItemDB).filter(
            AlertItemDB.url_hash == url_hash
        ).first()

    def get_inbox(
        self,
        user_id: Optional[str] = None,
        topic_id: Optional[str] = None,
        decision: Optional[str] = None,
        status: Optional[str] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[AlertItemDB]:
        """Fetch inbox items with optional filters.

        Without an explicit *decision*, KEEP, REVIEW and unscored items are
        returned. Results are ordered by relevance score (NULLs last), then
        fetch time, newest first.

        NOTE(review): *user_id* is accepted but never used in the query —
        confirm whether per-user filtering was intended here.
        """
        query = self.db.query(AlertItemDB)

        if topic_id:
            query = query.filter(AlertItemDB.topic_id == topic_id)

        if decision:
            query = query.filter(
                AlertItemDB.relevance_decision == RelevanceDecisionEnum(decision)
            )
        else:
            # Default: KEEP and REVIEW, plus items not yet scored.
            query = query.filter(
                or_(
                    AlertItemDB.relevance_decision == RelevanceDecisionEnum.KEEP,
                    AlertItemDB.relevance_decision == RelevanceDecisionEnum.REVIEW,
                    AlertItemDB.relevance_decision.is_(None)
                )
            )

        if status:
            query = query.filter(AlertItemDB.status == AlertStatusEnum(status))

        return query.order_by(
            AlertItemDB.relevance_score.desc().nullslast(),
            AlertItemDB.fetched_at.desc()
        ).offset(offset).limit(limit).all()

    def get_unscored(
        self,
        topic_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[AlertItemDB]:
        """Fetch alerts still in NEW status (not yet scored), newest first."""
        query = self.db.query(AlertItemDB).filter(
            AlertItemDB.status == AlertStatusEnum.NEW
        )

        if topic_id:
            query = query.filter(AlertItemDB.topic_id == topic_id)

        return query.order_by(AlertItemDB.fetched_at.desc()).limit(limit).all()

    def get_by_topic(
        self,
        topic_id: str,
        limit: int = 100,
        offset: int = 0,
    ) -> List[AlertItemDB]:
        """Fetch all alerts of a topic, newest first."""
        return self.db.query(AlertItemDB).filter(
            AlertItemDB.topic_id == topic_id
        ).order_by(
            AlertItemDB.fetched_at.desc()
        ).offset(offset).limit(limit).all()

    def count_by_status(self, topic_id: Optional[str] = None) -> Dict[str, int]:
        """Count alerts grouped by status; returns {status_value: count}."""
        query = self.db.query(
            AlertItemDB.status,
            func.count(AlertItemDB.id).label('count')
        )

        if topic_id:
            query = query.filter(AlertItemDB.topic_id == topic_id)

        results = query.group_by(AlertItemDB.status).all()

        return {r[0].value: r[1] for r in results}

    def count_by_decision(self, topic_id: Optional[str] = None) -> Dict[str, int]:
        """Count alerts grouped by relevance decision; NULL maps to "unscored"."""
        query = self.db.query(
            AlertItemDB.relevance_decision,
            func.count(AlertItemDB.id).label('count')
        )

        if topic_id:
            query = query.filter(AlertItemDB.topic_id == topic_id)

        results = query.group_by(AlertItemDB.relevance_decision).all()

        return {
            (r[0].value if r[0] else "unscored"): r[1]
            for r in results
        }

    # ==================== UPDATE ====================

    def update_scoring(
        self,
        alert_id: str,
        score: float,
        decision: str,
        reasons: Optional[List[str]] = None,
        summary: Optional[str] = None,
        model: Optional[str] = None,
    ) -> Optional[AlertItemDB]:
        """Store a relevance score and move the alert to SCORED status.

        Returns the refreshed alert, or None if *alert_id* is unknown.

        Raises:
            ValueError: if *decision* is not a RelevanceDecisionEnum value.
        """
        alert = self.get_by_id(alert_id)
        if not alert:
            return None

        alert.relevance_score = score
        alert.relevance_decision = RelevanceDecisionEnum(decision)
        alert.relevance_reasons = reasons or []
        alert.relevance_summary = summary
        alert.scored_by_model = model
        alert.scored_at = datetime.utcnow()
        alert.status = AlertStatusEnum.SCORED
        alert.processed_at = datetime.utcnow()

        self.db.commit()
        self.db.refresh(alert)
        return alert

    def update_status(
        self,
        alert_id: str,
        status: str,
    ) -> Optional[AlertItemDB]:
        """Set the status of an alert; None if the alert does not exist."""
        alert = self.get_by_id(alert_id)
        if not alert:
            return None

        alert.status = AlertStatusEnum(status)

        self.db.commit()
        self.db.refresh(alert)
        return alert

    def mark_reviewed(
        self,
        alert_id: str,
        is_relevant: bool,
        notes: Optional[str] = None,
        tags: Optional[List[str]] = None,
    ) -> Optional[AlertItemDB]:
        """Mark an alert as REVIEWED and attach user feedback.

        Empty *notes*/*tags* leave the stored values untouched.
        """
        alert = self.get_by_id(alert_id)
        if not alert:
            return None

        alert.status = AlertStatusEnum.REVIEWED
        alert.user_marked_relevant = is_relevant
        if notes:
            alert.user_notes = notes
        if tags:
            alert.user_tags = tags

        self.db.commit()
        self.db.refresh(alert)
        return alert

    def archive(self, alert_id: str) -> Optional[AlertItemDB]:
        """Archive an alert (delegates to update_status with "archived")."""
        return self.update_status(alert_id, "archived")

    # ==================== DELETE ====================

    def delete(self, alert_id: str) -> bool:
        """Delete one alert; True if it existed."""
        alert = self.get_by_id(alert_id)
        if not alert:
            return False

        self.db.delete(alert)
        self.db.commit()
        return True

    def delete_old(self, days: int = 90, topic_id: Optional[str] = None) -> int:
        """Bulk-delete ARCHIVED alerts fetched more than *days* ago.

        Returns the number of deleted rows.
        """
        cutoff = datetime.utcnow() - timedelta(days=days)

        query = self.db.query(AlertItemDB).filter(
            AlertItemDB.status == AlertStatusEnum.ARCHIVED,
            AlertItemDB.fetched_at < cutoff,
        )

        if topic_id:
            query = query.filter(AlertItemDB.topic_id == topic_id)

        count = query.delete()
        self.db.commit()
        return count

    # ==================== FOR RSS FETCHER ====================

    def get_existing_urls(self, topic_id: str) -> set:
        """Return all known URL hashes for a topic.

        Used by the RSS fetcher to skip duplicates before inserting.
        """
        results = self.db.query(AlertItemDB.url_hash).filter(
            AlertItemDB.topic_id == topic_id
        ).all()

        return {r[0] for r in results if r[0]}

    def create_from_alert_item(self, alert_item, topic_id: str) -> AlertItemDB:
        """Create an alert from an AlertItem object produced by the RSS fetcher.

        Args:
            alert_item: AlertItem from rss_fetcher
            topic_id: Topic ID to associate with

        Returns:
            Created AlertItemDB instance
        """
        return self.create(
            topic_id=topic_id,
            title=alert_item.title,
            url=alert_item.url,
            snippet=alert_item.snippet or "",
            source=alert_item.source.value if hasattr(alert_item.source, 'value') else str(alert_item.source),
            published_at=alert_item.published_at,
        )

    # ==================== HELPER ====================

    def _compute_url_hash(self, url: str) -> str:
        """Return the first 16 hex chars of SHA256 over the normalized URL."""
        normalized = self._normalize_url(url)
        return hashlib.sha256(normalized.encode()).hexdigest()[:16]

    def _normalize_url(self, url: str) -> str:
        """Normalize a URL for deduplication.

        Lowercases the host, strips common tracking query parameters,
        trailing path slashes and the fragment.
        """
        parsed = urllib.parse.urlparse(url)

        # Remove tracking parameters
        tracking_params = {
            "utm_source", "utm_medium", "utm_campaign", "utm_content", "utm_term",
            "fbclid", "gclid", "ref", "source"
        }

        query_params = urllib.parse.parse_qs(parsed.query)
        cleaned_params = {k: v for k, v in query_params.items()
                          if k.lower() not in tracking_params}

        cleaned_query = urllib.parse.urlencode(cleaned_params, doseq=True)

        # Rebuild the URL without the fragment
        normalized = urllib.parse.urlunparse((
            parsed.scheme,
            parsed.netloc.lower(),
            parsed.path.rstrip("/"),
            parsed.params,
            cleaned_query,
            ""  # No fragment
        ))

        return normalized

    # ==================== CONVERSION ====================

    def to_dict(self, alert: AlertItemDB) -> Dict[str, Any]:
        """Serialize a DB row into the nested dict shape used by the API layer."""
        return {
            "id": alert.id,
            "topic_id": alert.topic_id,
            "title": alert.title,
            "url": alert.url,
            "snippet": alert.snippet,
            "source": alert.source.value,
            "lang": alert.lang,
            "published_at": alert.published_at.isoformat() if alert.published_at else None,
            "fetched_at": alert.fetched_at.isoformat() if alert.fetched_at else None,
            "status": alert.status.value,
            "relevance": {
                "score": alert.relevance_score,
                "decision": alert.relevance_decision.value if alert.relevance_decision else None,
                "reasons": alert.relevance_reasons,
                "summary": alert.relevance_summary,
                "model": alert.scored_by_model,
                "scored_at": alert.scored_at.isoformat() if alert.scored_at else None,
            },
            "user_feedback": {
                "marked_relevant": alert.user_marked_relevant,
                "tags": alert.user_tags,
                "notes": alert.user_notes,
            },
        }
|
||||
226
backend-lehrer/alerts_agent/db/profile_repository.py
Normal file
226
backend-lehrer/alerts_agent/db/profile_repository.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""
|
||||
Repository für Alert Profiles (Nutzer-Profile für Relevanz-Scoring).
|
||||
"""
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.orm import Session as DBSession
|
||||
from sqlalchemy.orm.attributes import flag_modified
|
||||
|
||||
from .models import AlertProfileDB
|
||||
|
||||
|
||||
class ProfileRepository:
|
||||
"""Repository für Alert Profiles (Nutzer-Profile für Relevanz-Scoring)."""
|
||||
|
||||
    def __init__(self, db: DBSession):
        # The session is owned by the caller; the repository only borrows it.
        self.db = db
||||
|
||||
# ==================== CREATE / GET-OR-CREATE ====================
|
||||
|
||||
def get_or_create(self, user_id: str = None) -> AlertProfileDB:
|
||||
"""Holt oder erstellt ein Profil."""
|
||||
profile = self.get_by_user_id(user_id)
|
||||
if profile:
|
||||
return profile
|
||||
|
||||
# Neues Profil erstellen
|
||||
profile = AlertProfileDB(
|
||||
id=str(uuid.uuid4()),
|
||||
user_id=user_id,
|
||||
name="Default" if not user_id else f"Profile {user_id[:8]}",
|
||||
)
|
||||
self.db.add(profile)
|
||||
self.db.commit()
|
||||
self.db.refresh(profile)
|
||||
return profile
|
||||
|
||||
    def create_default_education_profile(self, user_id: str = None) -> AlertProfileDB:
        """Create and persist a preset profile for German education topics.

        Seeds weighted priorities (inclusion, school privacy, Bavarian school
        law, digitalization), common exclusion phrases, and scoring policies.
        Always creates a new row — it does not check for an existing profile.
        """
        profile = AlertProfileDB(
            id=str(uuid.uuid4()),
            user_id=user_id,
            name="Bildung Default",
            # Each priority: display label, scoring weight (0..1), trigger
            # keywords, and a free-text description for the scorer.
            priorities=[
                {
                    "label": "Inklusion",
                    "weight": 0.9,
                    "keywords": ["inklusiv", "Förderbedarf", "Behinderung", "Barrierefreiheit"],
                    "description": "Inklusive Bildung, Förderschulen, Nachteilsausgleich"
                },
                {
                    "label": "Datenschutz Schule",
                    "weight": 0.85,
                    "keywords": ["DSGVO", "Schülerfotos", "Einwilligung", "personenbezogene Daten"],
                    "description": "DSGVO in Schulen, Datenschutz bei Klassenfotos"
                },
                {
                    "label": "Schulrecht Bayern",
                    "weight": 0.8,
                    "keywords": ["BayEUG", "Schulordnung", "Kultusministerium", "Bayern"],
                    "description": "Bayerisches Schulrecht, Verordnungen"
                },
                {
                    "label": "Digitalisierung Schule",
                    "weight": 0.7,
                    "keywords": ["DigitalPakt", "Tablet-Klasse", "Lernplattform"],
                    "description": "Digitale Medien im Unterricht"
                },
            ],
            # Phrases that immediately disqualify an item (job ads, PR, ...).
            exclusions=["Stellenanzeige", "Praktikum gesucht", "Werbung", "Pressemitteilung"],
            policies={
                "prefer_german_sources": True,
                "max_age_days": 30,
                "min_content_length": 100,
            }
        )
        self.db.add(profile)
        self.db.commit()
        self.db.refresh(profile)
        return profile
|
||||
|
||||
# ==================== READ ====================
|
||||
|
||||
def get_by_id(self, profile_id: str) -> Optional[AlertProfileDB]:
|
||||
"""Holt ein Profil nach ID."""
|
||||
return self.db.query(AlertProfileDB).filter(
|
||||
AlertProfileDB.id == profile_id
|
||||
).first()
|
||||
|
||||
def get_by_user_id(self, user_id: str) -> Optional[AlertProfileDB]:
    """Fetch a profile by owner.

    A falsy user_id selects the global default profile
    (the row whose user_id IS NULL).
    """
    query = self.db.query(AlertProfileDB)
    if user_id:
        return query.filter(AlertProfileDB.user_id == user_id).first()
    # No user given: fall back to the user-less default profile.
    return query.filter(AlertProfileDB.user_id.is_(None)).first()
|
||||
|
||||
# ==================== UPDATE ====================
|
||||
|
||||
def update_priorities(
    self,
    profile_id: str,
    priorities: List[Dict],
) -> Optional[AlertProfileDB]:
    """Replace a profile's priority list.

    Returns the refreshed profile, or None when no such profile exists.
    """
    profile = self.get_by_id(profile_id)
    if profile is None:
        return None

    profile.priorities = priorities
    self.db.commit()
    self.db.refresh(profile)
    return profile
|
||||
|
||||
def update_exclusions(
    self,
    profile_id: str,
    exclusions: List[str],
) -> Optional[AlertProfileDB]:
    """Replace a profile's exclusion keyword list.

    Returns the refreshed profile, or None when no such profile exists.
    """
    profile = self.get_by_id(profile_id)
    if profile is None:
        return None

    profile.exclusions = exclusions
    self.db.commit()
    self.db.refresh(profile)
    return profile
|
||||
|
||||
def add_feedback(
    self,
    profile_id: str,
    title: str,
    url: str,
    is_relevant: bool,
    reason: str = "",
) -> Optional[AlertProfileDB]:
    """Record user feedback on an item as a positive or negative example.

    Keeps at most the 20 newest examples per polarity and updates the
    kept/dropped/scored counters accordingly.

    Args:
        profile_id: Profile to attach the feedback to.
        title: Item title.
        url: Item URL.
        is_relevant: True stores a positive example, False a negative one.
        reason: Optional free-text justification.

    Returns:
        The refreshed profile, or None when no such profile exists.
    """
    profile = self.get_by_id(profile_id)
    if profile is None:
        return None

    entry = {
        "title": title,
        "url": url,
        "reason": reason,
        "added_at": datetime.utcnow().isoformat(),
    }

    field = "positive_examples" if is_relevant else "negative_examples"
    history = list(getattr(profile, field) or [])
    history.append(entry)
    setattr(profile, field, history[-20:])  # cap at the 20 most recent entries
    if is_relevant:
        profile.total_kept += 1
    else:
        profile.total_dropped += 1
    # JSON column mutated in place above — mark it dirty for SQLAlchemy.
    flag_modified(profile, field)

    profile.total_scored += 1
    self.db.commit()
    self.db.refresh(profile)
    return profile
|
||||
|
||||
def update_stats(
    self,
    profile_id: str,
    kept: int = 0,
    dropped: int = 0,
) -> Optional[AlertProfileDB]:
    """Add batch counts to a profile's kept/dropped/scored statistics.

    Returns the refreshed profile, or None when no such profile exists.
    """
    profile = self.get_by_id(profile_id)
    if profile is None:
        return None

    profile.total_kept += kept
    profile.total_dropped += dropped
    profile.total_scored += kept + dropped

    self.db.commit()
    self.db.refresh(profile)
    return profile
|
||||
|
||||
# ==================== DELETE ====================
|
||||
|
||||
def delete(self, profile_id: str) -> bool:
    """Delete a profile; returns True when a row was actually removed."""
    profile = self.get_by_id(profile_id)
    if profile is None:
        return False

    self.db.delete(profile)
    self.db.commit()
    return True
|
||||
|
||||
# ==================== CONVERSION ====================
|
||||
|
||||
def to_dict(self, profile: AlertProfileDB) -> Dict[str, Any]:
    """Serialize a profile into a plain JSON-compatible dictionary.

    Examples are reported as counts only; timestamps are ISO-8601 strings
    or None.
    """
    created = profile.created_at
    updated = profile.updated_at
    return {
        "id": profile.id,
        "user_id": profile.user_id,
        "name": profile.name,
        "priorities": profile.priorities,
        "exclusions": profile.exclusions,
        "policies": profile.policies,
        "examples": {
            "positive": len(profile.positive_examples or []),
            "negative": len(profile.negative_examples or []),
        },
        "stats": {
            "total_scored": profile.total_scored,
            "total_kept": profile.total_kept,
            "total_dropped": profile.total_dropped,
            "accuracy_estimate": profile.accuracy_estimate,
        },
        "created_at": created.isoformat() if created else None,
        "updated_at": updated.isoformat() if updated else None,
    }
|
||||
File diff suppressed because it is too large
Load Diff
187
backend-lehrer/alerts_agent/db/rule_repository.py
Normal file
187
backend-lehrer/alerts_agent/db/rule_repository.py
Normal file
@@ -0,0 +1,187 @@
|
||||
"""
|
||||
Repository für Alert Rules (Filterregeln).
|
||||
"""
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.orm import Session as DBSession
|
||||
from sqlalchemy import or_
|
||||
|
||||
from .models import AlertRuleDB, RuleActionEnum
|
||||
|
||||
|
||||
class RuleRepository:
    """Data-access layer for alert rules (filter rules)."""

    def __init__(self, db: DBSession):
        self.db = db

    # -------- create --------

    def create(
        self,
        name: str,
        conditions: List[Dict],
        action_type: str = "keep",
        action_config: Dict = None,
        topic_id: str = None,
        user_id: str = None,
        description: str = "",
        priority: int = 0,
    ) -> AlertRuleDB:
        """Persist a new rule and return the refreshed instance.

        action_type is validated by converting it through RuleActionEnum,
        so an unknown value raises ValueError before anything is written.
        """
        rule = AlertRuleDB(
            id=str(uuid.uuid4()),
            topic_id=topic_id,
            user_id=user_id,
            name=name,
            description=description,
            conditions=conditions,
            action_type=RuleActionEnum(action_type),
            action_config=action_config or {},
            priority=priority,
        )
        self.db.add(rule)
        self.db.commit()
        self.db.refresh(rule)
        return rule

    # -------- read --------

    def get_by_id(self, rule_id: str) -> Optional[AlertRuleDB]:
        """Fetch a rule by primary key, or None when absent."""
        query = self.db.query(AlertRuleDB)
        return query.filter(AlertRuleDB.id == rule_id).first()

    def get_active(
        self,
        topic_id: str = None,
        user_id: str = None,
    ) -> List[AlertRuleDB]:
        """Return all active rules, highest priority first.

        When a topic_id (or user_id) is given, scoped rules are combined
        with global ones (those whose topic_id/user_id IS NULL).
        """
        query = self.db.query(AlertRuleDB).filter(
            AlertRuleDB.is_active == True  # noqa: E712 — SQLAlchemy expression
        )
        if topic_id:
            query = query.filter(
                or_(AlertRuleDB.topic_id == topic_id, AlertRuleDB.topic_id.is_(None))
            )
        if user_id:
            query = query.filter(
                or_(AlertRuleDB.user_id == user_id, AlertRuleDB.user_id.is_(None))
            )
        return query.order_by(AlertRuleDB.priority.desc()).all()

    def get_all(
        self,
        user_id: str = None,
        topic_id: str = None,
        is_active: bool = None,
    ) -> List[AlertRuleDB]:
        """Return rules matching the optional filters, highest priority first."""
        query = self.db.query(AlertRuleDB)
        if user_id:
            query = query.filter(AlertRuleDB.user_id == user_id)
        if topic_id:
            query = query.filter(AlertRuleDB.topic_id == topic_id)
        if is_active is not None:
            query = query.filter(AlertRuleDB.is_active == is_active)
        return query.order_by(AlertRuleDB.priority.desc()).all()

    # -------- update --------

    def update(
        self,
        rule_id: str,
        name: str = None,
        description: str = None,
        conditions: List[Dict] = None,
        action_type: str = None,
        action_config: Dict = None,
        priority: int = None,
        is_active: bool = None,
    ) -> Optional[AlertRuleDB]:
        """Partially update a rule; only non-None arguments are applied.

        Returns the refreshed rule, or None when no such rule exists.
        """
        rule = self.get_by_id(rule_id)
        if rule is None:
            return None

        # Plain attribute updates share one loop; action_type needs an
        # enum conversion and is handled separately below.
        plain_updates = {
            "name": name,
            "description": description,
            "conditions": conditions,
            "action_config": action_config,
            "priority": priority,
            "is_active": is_active,
        }
        for attr, value in plain_updates.items():
            if value is not None:
                setattr(rule, attr, value)
        if action_type is not None:
            rule.action_type = RuleActionEnum(action_type)

        self.db.commit()
        self.db.refresh(rule)
        return rule

    def increment_match_count(self, rule_id: str) -> Optional[AlertRuleDB]:
        """Bump a rule's match counter and stamp the match time (UTC)."""
        rule = self.get_by_id(rule_id)
        if rule is None:
            return None

        rule.match_count += 1
        rule.last_matched_at = datetime.utcnow()

        self.db.commit()
        self.db.refresh(rule)
        return rule

    # -------- delete --------

    def delete(self, rule_id: str) -> bool:
        """Delete a rule; returns True when a row was actually removed."""
        rule = self.get_by_id(rule_id)
        if rule is None:
            return False

        self.db.delete(rule)
        self.db.commit()
        return True

    # -------- conversion --------

    def to_dict(self, rule: AlertRuleDB) -> Dict[str, Any]:
        """Serialize a rule into a plain JSON-compatible dictionary."""
        matched = rule.last_matched_at
        created = rule.created_at
        updated = rule.updated_at
        return {
            "id": rule.id,
            "topic_id": rule.topic_id,
            "user_id": rule.user_id,
            "name": rule.name,
            "description": rule.description,
            "conditions": rule.conditions,
            "action_type": rule.action_type.value,
            "action_config": rule.action_config,
            "priority": rule.priority,
            "is_active": rule.is_active,
            "stats": {
                "match_count": rule.match_count,
                "last_matched_at": matched.isoformat() if matched else None,
            },
            "created_at": created.isoformat() if created else None,
            "updated_at": updated.isoformat() if updated else None,
        }
|
||||
185
backend-lehrer/alerts_agent/db/topic_repository.py
Normal file
185
backend-lehrer/alerts_agent/db/topic_repository.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""
|
||||
Repository für Alert Topics (Feed-Quellen).
|
||||
"""
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from sqlalchemy.orm import Session as DBSession
|
||||
|
||||
from .models import AlertTopicDB, FeedTypeEnum
|
||||
|
||||
|
||||
class TopicRepository:
    """Data-access layer for alert topics (feed sources)."""

    def __init__(self, db: DBSession):
        self.db = db

    # -------- create --------

    def create(
        self,
        name: str,
        feed_url: str = None,
        feed_type: str = "rss",
        user_id: str = None,
        description: str = "",
        fetch_interval_minutes: int = 60,
        is_active: bool = True,
    ) -> AlertTopicDB:
        """Persist a new topic and return the refreshed instance.

        feed_type is validated via FeedTypeEnum, so an unknown value
        raises ValueError before anything is written.
        """
        topic = AlertTopicDB(
            id=str(uuid.uuid4()),
            user_id=user_id,
            name=name,
            description=description,
            feed_url=feed_url,
            feed_type=FeedTypeEnum(feed_type),
            fetch_interval_minutes=fetch_interval_minutes,
            is_active=is_active,
        )
        self.db.add(topic)
        self.db.commit()
        self.db.refresh(topic)
        return topic

    # -------- read --------

    def get_by_id(self, topic_id: str) -> Optional[AlertTopicDB]:
        """Fetch a topic by primary key, or None when absent."""
        query = self.db.query(AlertTopicDB)
        return query.filter(AlertTopicDB.id == topic_id).first()

    def get_all(
        self,
        user_id: str = None,
        is_active: bool = None,
        limit: int = 100,
        offset: int = 0,
    ) -> List[AlertTopicDB]:
        """Return topics matching the optional filters, newest first."""
        query = self.db.query(AlertTopicDB)
        if user_id:
            query = query.filter(AlertTopicDB.user_id == user_id)
        if is_active is not None:
            query = query.filter(AlertTopicDB.is_active == is_active)
        ordered = query.order_by(AlertTopicDB.created_at.desc())
        return ordered.offset(offset).limit(limit).all()

    def get_active_for_fetch(self) -> List[AlertTopicDB]:
        """Return every active topic that has a feed URL to fetch from."""
        return self.db.query(AlertTopicDB).filter(
            AlertTopicDB.is_active == True,  # noqa: E712 — SQLAlchemy expression
            AlertTopicDB.feed_url.isnot(None),
        ).all()

    # -------- update --------

    def update(
        self,
        topic_id: str,
        name: str = None,
        description: str = None,
        feed_url: str = None,
        feed_type: str = None,
        is_active: bool = None,
        fetch_interval_minutes: int = None,
    ) -> Optional[AlertTopicDB]:
        """Partially update a topic; only non-None arguments are applied.

        Returns the refreshed topic, or None when no such topic exists.
        """
        topic = self.get_by_id(topic_id)
        if topic is None:
            return None

        # Plain attribute updates share one loop; feed_type needs an
        # enum conversion and is handled separately below.
        plain_updates = {
            "name": name,
            "description": description,
            "feed_url": feed_url,
            "is_active": is_active,
            "fetch_interval_minutes": fetch_interval_minutes,
        }
        for attr, value in plain_updates.items():
            if value is not None:
                setattr(topic, attr, value)
        if feed_type is not None:
            topic.feed_type = FeedTypeEnum(feed_type)

        self.db.commit()
        self.db.refresh(topic)
        return topic

    def update_fetch_status(
        self,
        topic_id: str,
        last_fetch_error: str = None,
        items_fetched: int = 0,
    ) -> Optional[AlertTopicDB]:
        """Record the outcome of a fetch attempt.

        Stamps last_fetched_at with the current UTC time, overwrites the
        error field (None clears a previous error) and adds items_fetched
        to the running total.
        """
        topic = self.get_by_id(topic_id)
        if topic is None:
            return None

        topic.last_fetched_at = datetime.utcnow()
        topic.last_fetch_error = last_fetch_error
        topic.total_items_fetched += items_fetched

        self.db.commit()
        self.db.refresh(topic)
        return topic

    def increment_stats(
        self,
        topic_id: str,
        kept: int = 0,
        dropped: int = 0,
    ) -> Optional[AlertTopicDB]:
        """Add batch counts to a topic's kept/dropped statistics."""
        topic = self.get_by_id(topic_id)
        if topic is None:
            return None

        topic.items_kept += kept
        topic.items_dropped += dropped

        self.db.commit()
        self.db.refresh(topic)
        return topic

    # -------- delete --------

    def delete(self, topic_id: str) -> bool:
        """Delete a topic (items cascade via the DB); True when removed."""
        topic = self.get_by_id(topic_id)
        if topic is None:
            return False

        self.db.delete(topic)
        self.db.commit()
        return True

    # -------- conversion --------

    def to_dict(self, topic: AlertTopicDB) -> Dict[str, Any]:
        """Serialize a topic into a plain JSON-compatible dictionary."""
        fetched = topic.last_fetched_at
        created = topic.created_at
        updated = topic.updated_at
        return {
            "id": topic.id,
            "user_id": topic.user_id,
            "name": topic.name,
            "description": topic.description,
            "feed_url": topic.feed_url,
            "feed_type": topic.feed_type.value,
            "is_active": topic.is_active,
            "fetch_interval_minutes": topic.fetch_interval_minutes,
            "last_fetched_at": fetched.isoformat() if fetched else None,
            "last_fetch_error": topic.last_fetch_error,
            "stats": {
                "total_items_fetched": topic.total_items_fetched,
                "items_kept": topic.items_kept,
                "items_dropped": topic.items_dropped,
            },
            "created_at": created.isoformat() if created else None,
            "updated_at": updated.isoformat() if updated else None,
        }
|
||||
84
backend-lehrer/services/pdf_models.py
Normal file
84
backend-lehrer/services/pdf_models.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""
|
||||
PDF Service - Data Models and Shared Types.
|
||||
|
||||
Dataclasses for letters, certificates, and corrections.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Optional, List
|
||||
|
||||
|
||||
@dataclass
class SchoolInfo:
    """School contact details rendered into PDF headers."""
    name: str
    address: str
    phone: str
    email: str
    logo_path: Optional[str] = None  # filesystem path to the school logo
    website: Optional[str] = None
    principal: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class LetterData:
    """All fields needed to render a parent-letter PDF."""
    recipient_name: str
    recipient_address: str
    student_name: str
    student_class: str
    subject: str
    content: str
    date: str
    teacher_name: str
    teacher_title: Optional[str] = None
    school_info: Optional[SchoolInfo] = None
    letter_type: str = "general"  # general, halbjahr, fehlzeiten, elternabend, lob
    tone: str = "professional"
    legal_references: Optional[List[Dict[str, str]]] = None
    gfk_principles_applied: Optional[List[str]] = None
|
||||
|
||||
|
||||
@dataclass
class CertificateData:
    """All fields needed to render a school-certificate PDF."""
    student_name: str
    student_birthdate: str
    student_class: str
    school_year: str
    certificate_type: str  # halbjahr, jahres, abschluss
    subjects: List[Dict[str, Any]]  # [{name, grade, note}]
    attendance: Dict[str, int]  # {days_absent, days_excused, days_unexcused}
    remarks: Optional[str] = None
    class_teacher: str = ""
    principal: str = ""
    school_info: Optional[SchoolInfo] = None
    issue_date: str = ""
    social_behavior: Optional[str] = None  # A, B, C, D
    work_behavior: Optional[str] = None  # A, B, C, D
|
||||
|
||||
|
||||
@dataclass
class StudentInfo:
    """Student identity fields used by correction PDFs."""
    student_id: str
    name: str
    class_name: str


@dataclass
class CorrectionData:
    """All fields needed to render a correction-overview PDF."""
    student: StudentInfo
    exam_title: str
    subject: str
    date: str
    max_points: int
    achieved_points: int
    grade: str
    percentage: float
    corrections: List[Dict[str, Any]]  # [{question, answer, points, feedback}]
    teacher_notes: str = ""
    ai_feedback: str = ""
    grade_distribution: Optional[Dict[str, int]] = None  # {grade: count}
    class_average: Optional[float] = None
|
||||
@@ -7,101 +7,37 @@ Shared Service für:
|
||||
- Correction (Korrektur-Übersichten)
|
||||
|
||||
Verwendet WeasyPrint für PDF-Rendering und Jinja2 für Templates.
|
||||
|
||||
Split structure:
|
||||
- pdf_models.py: Data classes (SchoolInfo, LetterData, CertificateData, etc.)
|
||||
- pdf_templates.py: Inline HTML templates (letter, certificate, correction)
|
||||
- pdf_service.py: Core PDFService class + convenience functions (this file)
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, List
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader, select_autoescape
|
||||
from weasyprint import HTML, CSS
|
||||
from weasyprint.text.fonts import FontConfiguration
|
||||
|
||||
from .pdf_models import (
|
||||
SchoolInfo, LetterData, CertificateData, StudentInfo, CorrectionData,
|
||||
)
|
||||
from .pdf_templates import (
|
||||
get_letter_template_html,
|
||||
get_certificate_template_html,
|
||||
get_correction_template_html,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Template directory
|
||||
TEMPLATES_DIR = Path(__file__).parent.parent / "templates" / "pdf"
|
||||
|
||||
|
||||
@dataclass
|
||||
class SchoolInfo:
|
||||
"""Schulinformationen für Header."""
|
||||
name: str
|
||||
address: str
|
||||
phone: str
|
||||
email: str
|
||||
logo_path: Optional[str] = None
|
||||
website: Optional[str] = None
|
||||
principal: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class LetterData:
|
||||
"""Daten für Elternbrief-PDF."""
|
||||
recipient_name: str
|
||||
recipient_address: str
|
||||
student_name: str
|
||||
student_class: str
|
||||
subject: str
|
||||
content: str
|
||||
date: str
|
||||
teacher_name: str
|
||||
teacher_title: Optional[str] = None
|
||||
school_info: Optional[SchoolInfo] = None
|
||||
letter_type: str = "general" # general, halbjahr, fehlzeiten, elternabend, lob
|
||||
tone: str = "professional"
|
||||
legal_references: Optional[List[Dict[str, str]]] = None
|
||||
gfk_principles_applied: Optional[List[str]] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class CertificateData:
|
||||
"""Daten für Zeugnis-PDF."""
|
||||
student_name: str
|
||||
student_birthdate: str
|
||||
student_class: str
|
||||
school_year: str
|
||||
certificate_type: str # halbjahr, jahres, abschluss
|
||||
subjects: List[Dict[str, Any]] # [{name, grade, note}]
|
||||
attendance: Dict[str, int] # {days_absent, days_excused, days_unexcused}
|
||||
remarks: Optional[str] = None
|
||||
class_teacher: str = ""
|
||||
principal: str = ""
|
||||
school_info: Optional[SchoolInfo] = None
|
||||
issue_date: str = ""
|
||||
social_behavior: Optional[str] = None # A, B, C, D
|
||||
work_behavior: Optional[str] = None # A, B, C, D
|
||||
|
||||
|
||||
@dataclass
|
||||
class StudentInfo:
|
||||
"""Schülerinformationen für Korrektur-PDFs."""
|
||||
student_id: str
|
||||
name: str
|
||||
class_name: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class CorrectionData:
|
||||
"""Daten für Korrektur-Übersicht PDF."""
|
||||
student: StudentInfo
|
||||
exam_title: str
|
||||
subject: str
|
||||
date: str
|
||||
max_points: int
|
||||
achieved_points: int
|
||||
grade: str
|
||||
percentage: float
|
||||
corrections: List[Dict[str, Any]] # [{question, answer, points, feedback}]
|
||||
teacher_notes: str = ""
|
||||
ai_feedback: str = ""
|
||||
grade_distribution: Optional[Dict[str, int]] = None # {note: anzahl}
|
||||
class_average: Optional[float] = None
|
||||
|
||||
|
||||
class PDFService:
|
||||
"""
|
||||
Zentrale PDF-Generierung für BreakPilot.
|
||||
@@ -113,18 +49,9 @@ class PDFService:
|
||||
"""
|
||||
|
||||
def __init__(self, templates_dir: Optional[Path] = None):
|
||||
"""
|
||||
Initialisiert den PDF-Service.
|
||||
|
||||
Args:
|
||||
templates_dir: Optionaler Pfad zu Templates (Standard: backend/templates/pdf)
|
||||
"""
|
||||
self.templates_dir = templates_dir or TEMPLATES_DIR
|
||||
|
||||
# Ensure templates directory exists
|
||||
self.templates_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Initialize Jinja2 environment
|
||||
self.jinja_env = Environment(
|
||||
loader=FileSystemLoader(str(self.templates_dir)),
|
||||
autoescape=select_autoescape(['html', 'xml']),
|
||||
@@ -132,13 +59,10 @@ class PDFService:
|
||||
lstrip_blocks=True
|
||||
)
|
||||
|
||||
# Add custom filters
|
||||
self.jinja_env.filters['date_format'] = self._date_format
|
||||
self.jinja_env.filters['grade_color'] = self._grade_color
|
||||
|
||||
# Font configuration for WeasyPrint
|
||||
self.font_config = FontConfiguration()
|
||||
|
||||
logger.info(f"PDFService initialized with templates from {self.templates_dir}")
|
||||
|
||||
@staticmethod
def _grade_color(grade: str) -> str:
    """Map a German numeric grade (1-6) or letter grade (A-D) to a hex color.

    The source text contained the mapping twice (diff-merge residue with
    duplicate dict keys); this is the single, de-duplicated mapping.
    Unknown grades fall back to a neutral dark grey.
    """
    grade_colors = {
        "1": "#27ae60",  # green
        "2": "#2ecc71",  # light green
        "3": "#f1c40f",  # yellow
        "4": "#e67e22",  # orange
        "5": "#e74c3c",  # red
        "6": "#c0392b",  # dark red
        "A": "#27ae60",
        "B": "#2ecc71",
        "C": "#f1c40f",
        "D": "#e74c3c",
    }
    # str() lets callers pass ints (e.g. grade 2) as well as strings.
    return grade_colors.get(str(grade), "#333333")
|
||||
|
||||
@@ -181,291 +98,73 @@ class PDFService:
|
||||
color: #666;
|
||||
}
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'DejaVu Sans', 'Liberation Sans', Arial, sans-serif;
|
||||
font-size: 11pt;
|
||||
line-height: 1.5;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
h1, h2, h3 {
|
||||
font-weight: bold;
|
||||
margin-top: 1em;
|
||||
margin-bottom: 0.5em;
|
||||
}
|
||||
|
||||
h1 { font-size: 16pt; }
|
||||
h2 { font-size: 14pt; }
|
||||
h3 { font-size: 12pt; }
|
||||
|
||||
.header {
|
||||
border-bottom: 2px solid #2c3e50;
|
||||
padding-bottom: 15px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.school-name {
|
||||
font-size: 18pt;
|
||||
font-weight: bold;
|
||||
color: #2c3e50;
|
||||
}
|
||||
|
||||
.school-info {
|
||||
font-size: 9pt;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.letter-date {
|
||||
text-align: right;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.recipient {
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.subject {
|
||||
font-weight: bold;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.content {
|
||||
text-align: justify;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.signature {
|
||||
margin-top: 40px;
|
||||
}
|
||||
|
||||
.legal-references {
|
||||
font-size: 9pt;
|
||||
color: #666;
|
||||
border-top: 1px solid #ddd;
|
||||
margin-top: 30px;
|
||||
padding-top: 10px;
|
||||
}
|
||||
|
||||
.gfk-badge {
|
||||
display: inline-block;
|
||||
background: #e8f5e9;
|
||||
color: #27ae60;
|
||||
font-size: 8pt;
|
||||
padding: 2px 8px;
|
||||
border-radius: 10px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
/* Zeugnis-Styles */
|
||||
.certificate-header {
|
||||
text-align: center;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.certificate-title {
|
||||
font-size: 20pt;
|
||||
font-weight: bold;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.student-info {
|
||||
margin-bottom: 20px;
|
||||
padding: 15px;
|
||||
background: #f9f9f9;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.grades-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.grades-table th,
|
||||
.grades-table td {
|
||||
border: 1px solid #ddd;
|
||||
padding: 8px 12px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.grades-table th {
|
||||
background: #2c3e50;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.grades-table tr:nth-child(even) {
|
||||
background: #f9f9f9;
|
||||
}
|
||||
|
||||
.grade-cell {
|
||||
text-align: center;
|
||||
font-weight: bold;
|
||||
font-size: 12pt;
|
||||
}
|
||||
|
||||
.attendance-box {
|
||||
background: #fff3cd;
|
||||
padding: 15px;
|
||||
border-radius: 5px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.signatures-row {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
margin-top: 50px;
|
||||
}
|
||||
|
||||
.signature-block {
|
||||
text-align: center;
|
||||
width: 40%;
|
||||
}
|
||||
|
||||
.signature-line {
|
||||
border-top: 1px solid #333;
|
||||
margin-top: 40px;
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
/* Korrektur-Styles */
|
||||
.exam-header {
|
||||
background: #2c3e50;
|
||||
color: white;
|
||||
padding: 15px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.result-box {
|
||||
background: #e8f5e9;
|
||||
padding: 20px;
|
||||
text-align: center;
|
||||
margin-bottom: 20px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.result-grade {
|
||||
font-size: 36pt;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.result-points {
|
||||
font-size: 14pt;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.corrections-list {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.correction-item {
|
||||
border: 1px solid #ddd;
|
||||
padding: 15px;
|
||||
margin-bottom: 10px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.correction-question {
|
||||
font-weight: bold;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.correction-feedback {
|
||||
background: #fff8e1;
|
||||
padding: 10px;
|
||||
margin-top: 10px;
|
||||
border-left: 3px solid #ffc107;
|
||||
font-size: 10pt;
|
||||
}
|
||||
|
||||
.stats-table {
|
||||
width: 100%;
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.stats-table td {
|
||||
padding: 5px 10px;
|
||||
font-size: 11pt; line-height: 1.5; color: #333;
|
||||
}
|
||||
h1, h2, h3 { font-weight: bold; margin-top: 1em; margin-bottom: 0.5em; }
|
||||
h1 { font-size: 16pt; } h2 { font-size: 14pt; } h3 { font-size: 12pt; }
|
||||
.header { border-bottom: 2px solid #2c3e50; padding-bottom: 15px; margin-bottom: 20px; }
|
||||
.school-name { font-size: 18pt; font-weight: bold; color: #2c3e50; }
|
||||
.school-info { font-size: 9pt; color: #666; }
|
||||
.letter-date { text-align: right; margin-bottom: 20px; }
|
||||
.recipient { margin-bottom: 30px; }
|
||||
.subject { font-weight: bold; margin-bottom: 20px; }
|
||||
.content { text-align: justify; margin-bottom: 30px; }
|
||||
.signature { margin-top: 40px; }
|
||||
.legal-references { font-size: 9pt; color: #666; border-top: 1px solid #ddd; margin-top: 30px; padding-top: 10px; }
|
||||
.gfk-badge { display: inline-block; background: #e8f5e9; color: #27ae60; font-size: 8pt; padding: 2px 8px; border-radius: 10px; margin-right: 5px; }
|
||||
.certificate-header { text-align: center; margin-bottom: 30px; }
|
||||
.certificate-title { font-size: 20pt; font-weight: bold; margin-bottom: 10px; }
|
||||
.student-info { margin-bottom: 20px; padding: 15px; background: #f9f9f9; border-radius: 5px; }
|
||||
.grades-table { width: 100%; border-collapse: collapse; margin-bottom: 20px; }
|
||||
.grades-table th, .grades-table td { border: 1px solid #ddd; padding: 8px 12px; text-align: left; }
|
||||
.grades-table th { background: #2c3e50; color: white; }
|
||||
.grades-table tr:nth-child(even) { background: #f9f9f9; }
|
||||
.grade-cell { text-align: center; font-weight: bold; font-size: 12pt; }
|
||||
.attendance-box { background: #fff3cd; padding: 15px; border-radius: 5px; margin-bottom: 20px; }
|
||||
.signatures-row { display: flex; justify-content: space-between; margin-top: 50px; }
|
||||
.signature-block { text-align: center; width: 40%; }
|
||||
.signature-line { border-top: 1px solid #333; margin-top: 40px; padding-top: 5px; }
|
||||
.exam-header { background: #2c3e50; color: white; padding: 15px; margin-bottom: 20px; }
|
||||
.result-box { background: #e8f5e9; padding: 20px; text-align: center; margin-bottom: 20px; border-radius: 5px; }
|
||||
.result-grade { font-size: 36pt; font-weight: bold; }
|
||||
.result-points { font-size: 14pt; color: #666; }
|
||||
.corrections-list { margin-bottom: 20px; }
|
||||
.correction-item { border: 1px solid #ddd; padding: 15px; margin-bottom: 10px; border-radius: 5px; }
|
||||
.correction-question { font-weight: bold; margin-bottom: 5px; }
|
||||
.correction-feedback { background: #fff8e1; padding: 10px; margin-top: 10px; border-left: 3px solid #ffc107; font-size: 10pt; }
|
||||
.stats-table { width: 100%; margin-top: 20px; }
|
||||
.stats-table td { padding: 5px 10px; }
|
||||
"""
|
||||
|
||||
def generate_letter_pdf(self, data: LetterData) -> bytes:
    """Render a parent-letter PDF.

    The merged diff text rendered the template and wrote the PDF twice;
    this version performs each step exactly once.

    Args:
        data: LetterData with all letter fields.

    Returns:
        The rendered PDF document as bytes.
    """
    logger.info(f"Generating letter PDF for student: {data.student_name}")

    template = self._get_letter_template()
    html_content = template.render(data=data, generated_at=datetime.now().strftime("%d.%m.%Y %H:%M"))
    css = CSS(string=self._get_base_css(), font_config=self.font_config)
    pdf_bytes = HTML(string=html_content).write_pdf(stylesheets=[css], font_config=self.font_config)

    logger.info(f"Letter PDF generated: {len(pdf_bytes)} bytes")
    return pdf_bytes
|
||||
|
||||
def generate_certificate_pdf(self, data: CertificateData) -> bytes:
|
||||
"""
|
||||
Generiert PDF für Schulzeugnis.
|
||||
|
||||
Args:
|
||||
data: CertificateData mit allen Zeugnisinformationen
|
||||
|
||||
Returns:
|
||||
PDF als bytes
|
||||
"""
|
||||
"""Generiert PDF für Schulzeugnis."""
|
||||
logger.info(f"Generating certificate PDF for: {data.student_name}")
|
||||
|
||||
template = self._get_certificate_template()
|
||||
html_content = template.render(
|
||||
data=data,
|
||||
generated_at=datetime.now().strftime("%d.%m.%Y %H:%M")
|
||||
)
|
||||
|
||||
html_content = template.render(data=data, generated_at=datetime.now().strftime("%d.%m.%Y %H:%M"))
|
||||
css = CSS(string=self._get_base_css(), font_config=self.font_config)
|
||||
pdf_bytes = HTML(string=html_content).write_pdf(
|
||||
stylesheets=[css],
|
||||
font_config=self.font_config
|
||||
)
|
||||
|
||||
pdf_bytes = HTML(string=html_content).write_pdf(stylesheets=[css], font_config=self.font_config)
|
||||
logger.info(f"Certificate PDF generated: {len(pdf_bytes)} bytes")
|
||||
return pdf_bytes
|
||||
|
||||
def generate_correction_pdf(self, data: CorrectionData) -> bytes:
|
||||
"""
|
||||
Generiert PDF für Korrektur-Übersicht.
|
||||
|
||||
Args:
|
||||
data: CorrectionData mit allen Korrekturinformationen
|
||||
|
||||
Returns:
|
||||
PDF als bytes
|
||||
"""
|
||||
"""Generiert PDF für Korrektur-Übersicht."""
|
||||
logger.info(f"Generating correction PDF for: {data.student.name}")
|
||||
|
||||
template = self._get_correction_template()
|
||||
html_content = template.render(
|
||||
data=data,
|
||||
generated_at=datetime.now().strftime("%d.%m.%Y %H:%M")
|
||||
)
|
||||
|
||||
html_content = template.render(data=data, generated_at=datetime.now().strftime("%d.%m.%Y %H:%M"))
|
||||
css = CSS(string=self._get_base_css(), font_config=self.font_config)
|
||||
pdf_bytes = HTML(string=html_content).write_pdf(
|
||||
stylesheets=[css],
|
||||
font_config=self.font_config
|
||||
)
|
||||
|
||||
pdf_bytes = HTML(string=html_content).write_pdf(stylesheets=[css], font_config=self.font_config)
|
||||
logger.info(f"Correction PDF generated: {len(pdf_bytes)} bytes")
|
||||
return pdf_bytes
|
||||
|
||||
@@ -474,321 +173,27 @@ class PDFService:
|
||||
template_path = self.templates_dir / "letter.html"
|
||||
if template_path.exists():
|
||||
return self.jinja_env.get_template("letter.html")
|
||||
|
||||
# Inline-Template als Fallback
|
||||
return self.jinja_env.from_string(self._get_letter_template_html())
|
||||
return self.jinja_env.from_string(get_letter_template_html())
|
||||
|
||||
def _get_certificate_template(self):
|
||||
"""Gibt Certificate-Template zurück."""
|
||||
template_path = self.templates_dir / "certificate.html"
|
||||
if template_path.exists():
|
||||
return self.jinja_env.get_template("certificate.html")
|
||||
|
||||
return self.jinja_env.from_string(self._get_certificate_template_html())
|
||||
return self.jinja_env.from_string(get_certificate_template_html())
|
||||
|
||||
def _get_correction_template(self):
|
||||
"""Gibt Correction-Template zurück."""
|
||||
template_path = self.templates_dir / "correction.html"
|
||||
if template_path.exists():
|
||||
return self.jinja_env.get_template("correction.html")
|
||||
|
||||
return self.jinja_env.from_string(self._get_correction_template_html())
|
||||
|
||||
@staticmethod
|
||||
def _get_letter_template_html() -> str:
|
||||
"""Inline HTML-Template für Elternbriefe."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>{{ data.subject }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="header">
|
||||
{% if data.school_info %}
|
||||
<div class="school-name">{{ data.school_info.name }}</div>
|
||||
<div class="school-info">
|
||||
{{ data.school_info.address }}<br>
|
||||
Tel: {{ data.school_info.phone }} | E-Mail: {{ data.school_info.email }}
|
||||
{% if data.school_info.website %} | {{ data.school_info.website }}{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="school-name">Schule</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="letter-date">
|
||||
{{ data.date }}
|
||||
</div>
|
||||
|
||||
<div class="recipient">
|
||||
{{ data.recipient_name }}<br>
|
||||
{{ data.recipient_address | replace('\\n', '<br>') | safe }}
|
||||
</div>
|
||||
|
||||
<div class="subject">
|
||||
Betreff: {{ data.subject }}
|
||||
</div>
|
||||
|
||||
<div class="meta-info" style="font-size: 10pt; color: #666; margin-bottom: 20px;">
|
||||
Schüler/in: {{ data.student_name }} | Klasse: {{ data.student_class }}
|
||||
</div>
|
||||
|
||||
<div class="content">
|
||||
{{ data.content | replace('\\n', '<br>') | safe }}
|
||||
</div>
|
||||
|
||||
{% if data.gfk_principles_applied %}
|
||||
<div style="margin-bottom: 20px;">
|
||||
{% for principle in data.gfk_principles_applied %}
|
||||
<span class="gfk-badge">✓ {{ principle }}</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="signature">
|
||||
<p>Mit freundlichen Grüßen</p>
|
||||
<p style="margin-top: 30px;">
|
||||
{{ data.teacher_name }}
|
||||
{% if data.teacher_title %}<br><span style="font-size: 10pt;">{{ data.teacher_title }}</span>{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{% if data.legal_references %}
|
||||
<div class="legal-references">
|
||||
<strong>Rechtliche Grundlagen:</strong><br>
|
||||
{% for ref in data.legal_references %}
|
||||
• {{ ref.law }} {{ ref.paragraph }}: {{ ref.title }}<br>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div style="font-size: 8pt; color: #999; margin-top: 30px; text-align: center;">
|
||||
Erstellt mit BreakPilot | {{ generated_at }}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def _get_certificate_template_html() -> str:
|
||||
"""Inline HTML-Template für Zeugnisse."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Zeugnis - {{ data.student_name }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="certificate-header">
|
||||
{% if data.school_info %}
|
||||
<div class="school-name" style="font-size: 14pt;">{{ data.school_info.name }}</div>
|
||||
{% endif %}
|
||||
<div class="certificate-title">
|
||||
{% if data.certificate_type == 'halbjahr' %}
|
||||
Halbjahreszeugnis
|
||||
{% elif data.certificate_type == 'jahres' %}
|
||||
Jahreszeugnis
|
||||
{% else %}
|
||||
Abschlusszeugnis
|
||||
{% endif %}
|
||||
</div>
|
||||
<div>Schuljahr {{ data.school_year }}</div>
|
||||
</div>
|
||||
|
||||
<div class="student-info">
|
||||
<table style="width: 100%;">
|
||||
<tr>
|
||||
<td><strong>Name:</strong> {{ data.student_name }}</td>
|
||||
<td><strong>Geburtsdatum:</strong> {{ data.student_birthdate }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Klasse:</strong> {{ data.student_class }}</td>
|
||||
<td> </td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<h3>Leistungen</h3>
|
||||
<table class="grades-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width: 70%;">Fach</th>
|
||||
<th style="width: 15%;">Note</th>
|
||||
<th style="width: 15%;">Punkte</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for subject in data.subjects %}
|
||||
<tr>
|
||||
<td>{{ subject.name }}</td>
|
||||
<td class="grade-cell" style="color: {{ subject.grade | grade_color }};">
|
||||
{{ subject.grade }}
|
||||
</td>
|
||||
<td class="grade-cell">{{ subject.points | default('-') }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
{% if data.social_behavior or data.work_behavior %}
|
||||
<h3>Verhalten</h3>
|
||||
<table class="grades-table" style="width: 50%;">
|
||||
{% if data.social_behavior %}
|
||||
<tr>
|
||||
<td>Sozialverhalten</td>
|
||||
<td class="grade-cell">{{ data.social_behavior }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% if data.work_behavior %}
|
||||
<tr>
|
||||
<td>Arbeitsverhalten</td>
|
||||
<td class="grade-cell">{{ data.work_behavior }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
</table>
|
||||
{% endif %}
|
||||
|
||||
<div class="attendance-box">
|
||||
<strong>Versäumte Tage:</strong> {{ data.attendance.days_absent | default(0) }}
|
||||
(davon entschuldigt: {{ data.attendance.days_excused | default(0) }},
|
||||
unentschuldigt: {{ data.attendance.days_unexcused | default(0) }})
|
||||
</div>
|
||||
|
||||
{% if data.remarks %}
|
||||
<div style="margin-bottom: 20px;">
|
||||
<strong>Bemerkungen:</strong><br>
|
||||
{{ data.remarks }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div style="margin-top: 30px;">
|
||||
<strong>Ausgestellt am:</strong> {{ data.issue_date }}
|
||||
</div>
|
||||
|
||||
<div class="signatures-row">
|
||||
<div class="signature-block">
|
||||
<div class="signature-line">{{ data.class_teacher }}</div>
|
||||
<div style="font-size: 9pt;">Klassenlehrer/in</div>
|
||||
</div>
|
||||
<div class="signature-block">
|
||||
<div class="signature-line">{{ data.principal }}</div>
|
||||
<div style="font-size: 9pt;">Schulleiter/in</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="text-align: center; margin-top: 40px;">
|
||||
<div style="font-size: 9pt; color: #666;">Siegel der Schule</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def _get_correction_template_html() -> str:
|
||||
"""Inline HTML-Template für Korrektur-Übersichten."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Korrektur - {{ data.exam_title }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="exam-header">
|
||||
<h1 style="margin: 0; color: white;">{{ data.exam_title }}</h1>
|
||||
<div>{{ data.subject }} | {{ data.date }}</div>
|
||||
</div>
|
||||
|
||||
<div class="student-info">
|
||||
<strong>{{ data.student.name }}</strong> | Klasse {{ data.student.class_name }}
|
||||
</div>
|
||||
|
||||
<div class="result-box">
|
||||
<div class="result-grade" style="color: {{ data.grade | grade_color }};">
|
||||
Note: {{ data.grade }}
|
||||
</div>
|
||||
<div class="result-points">
|
||||
{{ data.achieved_points }} von {{ data.max_points }} Punkten
|
||||
({{ data.percentage | round(1) }}%)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h3>Detaillierte Auswertung</h3>
|
||||
<div class="corrections-list">
|
||||
{% for item in data.corrections %}
|
||||
<div class="correction-item">
|
||||
<div class="correction-question">
|
||||
{{ item.question }}
|
||||
</div>
|
||||
{% if item.answer %}
|
||||
<div style="margin: 5px 0; font-style: italic; color: #555;">
|
||||
<strong>Antwort:</strong> {{ item.answer }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div>
|
||||
<strong>Punkte:</strong> {{ item.points }}
|
||||
</div>
|
||||
{% if item.feedback %}
|
||||
<div class="correction-feedback">
|
||||
{{ item.feedback }}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{% if data.teacher_notes %}
|
||||
<div style="background: #e3f2fd; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
|
||||
<strong>Lehrerkommentar:</strong><br>
|
||||
{{ data.teacher_notes }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data.ai_feedback %}
|
||||
<div style="background: #f3e5f5; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
|
||||
<strong>KI-Feedback:</strong><br>
|
||||
{{ data.ai_feedback }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data.class_average or data.grade_distribution %}
|
||||
<h3>Klassenstatistik</h3>
|
||||
<table class="stats-table">
|
||||
{% if data.class_average %}
|
||||
<tr>
|
||||
<td><strong>Klassendurchschnitt:</strong></td>
|
||||
<td>{{ data.class_average }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% if data.grade_distribution %}
|
||||
<tr>
|
||||
<td><strong>Notenverteilung:</strong></td>
|
||||
<td>
|
||||
{% for grade, count in data.grade_distribution.items() %}
|
||||
Note {{ grade }}: {{ count }}x{% if not loop.last %}, {% endif %}
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
</table>
|
||||
{% endif %}
|
||||
|
||||
<div class="signature" style="margin-top: 40px;">
|
||||
<p style="font-size: 9pt; color: #666;">Datum: {{ data.date }}</p>
|
||||
</div>
|
||||
|
||||
<div style="font-size: 8pt; color: #999; margin-top: 30px; text-align: center;">
|
||||
Erstellt mit BreakPilot | {{ generated_at }}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
return self.jinja_env.from_string(get_correction_template_html())
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Convenience functions for direct usage
|
||||
# =============================================================================
|
||||
|
||||
_pdf_service: Optional[PDFService] = None
|
||||
|
||||
|
||||
@@ -801,18 +206,8 @@ def get_pdf_service() -> PDFService:
|
||||
|
||||
|
||||
def generate_letter_pdf(data: Dict[str, Any]) -> bytes:
|
||||
"""
|
||||
Convenience function zum Generieren eines Elternbrief-PDFs.
|
||||
|
||||
Args:
|
||||
data: Dict mit allen Briefdaten
|
||||
|
||||
Returns:
|
||||
PDF als bytes
|
||||
"""
|
||||
"""Convenience function zum Generieren eines Elternbrief-PDFs."""
|
||||
service = get_pdf_service()
|
||||
|
||||
# Convert dict to LetterData
|
||||
school_info = None
|
||||
if data.get("school_info"):
|
||||
school_info = SchoolInfo(**data["school_info"])
|
||||
@@ -833,22 +228,12 @@ def generate_letter_pdf(data: Dict[str, Any]) -> bytes:
|
||||
legal_references=data.get("legal_references"),
|
||||
gfk_principles_applied=data.get("gfk_principles_applied")
|
||||
)
|
||||
|
||||
return service.generate_letter_pdf(letter_data)
|
||||
|
||||
|
||||
def generate_certificate_pdf(data: Dict[str, Any]) -> bytes:
|
||||
"""
|
||||
Convenience function zum Generieren eines Zeugnis-PDFs.
|
||||
|
||||
Args:
|
||||
data: Dict mit allen Zeugnisdaten
|
||||
|
||||
Returns:
|
||||
PDF als bytes
|
||||
"""
|
||||
"""Convenience function zum Generieren eines Zeugnis-PDFs."""
|
||||
service = get_pdf_service()
|
||||
|
||||
school_info = None
|
||||
if data.get("school_info"):
|
||||
school_info = SchoolInfo(**data["school_info"])
|
||||
@@ -869,30 +254,19 @@ def generate_certificate_pdf(data: Dict[str, Any]) -> bytes:
|
||||
social_behavior=data.get("social_behavior"),
|
||||
work_behavior=data.get("work_behavior")
|
||||
)
|
||||
|
||||
return service.generate_certificate_pdf(cert_data)
|
||||
|
||||
|
||||
def generate_correction_pdf(data: Dict[str, Any]) -> bytes:
|
||||
"""
|
||||
Convenience function zum Generieren eines Korrektur-PDFs.
|
||||
|
||||
Args:
|
||||
data: Dict mit allen Korrekturdaten
|
||||
|
||||
Returns:
|
||||
PDF als bytes
|
||||
"""
|
||||
"""Convenience function zum Generieren eines Korrektur-PDFs."""
|
||||
service = get_pdf_service()
|
||||
|
||||
# Create StudentInfo from dict
|
||||
student = StudentInfo(
|
||||
student_id=data.get("student_id", "unknown"),
|
||||
name=data.get("student_name", data.get("name", "")),
|
||||
class_name=data.get("student_class", data.get("class_name", ""))
|
||||
)
|
||||
|
||||
# Calculate percentage if not provided
|
||||
max_points = data.get("max_points", data.get("total_points", 0))
|
||||
achieved_points = data.get("achieved_points", 0)
|
||||
percentage = data.get("percentage", (achieved_points / max_points * 100) if max_points > 0 else 0.0)
|
||||
@@ -912,5 +286,4 @@ def generate_correction_pdf(data: Dict[str, Any]) -> bytes:
|
||||
grade_distribution=data.get("grade_distribution"),
|
||||
class_average=data.get("class_average")
|
||||
)
|
||||
|
||||
return service.generate_correction_pdf(correction_data)
|
||||
|
||||
298
backend-lehrer/services/pdf_templates.py
Normal file
298
backend-lehrer/services/pdf_templates.py
Normal file
@@ -0,0 +1,298 @@
|
||||
"""
|
||||
PDF Service - Inline HTML Templates.
|
||||
|
||||
Fallback templates when external template files don't exist.
|
||||
"""
|
||||
|
||||
|
||||
def get_letter_template_html() -> str:
|
||||
"""Inline HTML-Template für Elternbriefe."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>{{ data.subject }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="header">
|
||||
{% if data.school_info %}
|
||||
<div class="school-name">{{ data.school_info.name }}</div>
|
||||
<div class="school-info">
|
||||
{{ data.school_info.address }}<br>
|
||||
Tel: {{ data.school_info.phone }} | E-Mail: {{ data.school_info.email }}
|
||||
{% if data.school_info.website %} | {{ data.school_info.website }}{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="school-name">Schule</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="letter-date">
|
||||
{{ data.date }}
|
||||
</div>
|
||||
|
||||
<div class="recipient">
|
||||
{{ data.recipient_name }}<br>
|
||||
{{ data.recipient_address | replace('\\n', '<br>') | safe }}
|
||||
</div>
|
||||
|
||||
<div class="subject">
|
||||
Betreff: {{ data.subject }}
|
||||
</div>
|
||||
|
||||
<div class="meta-info" style="font-size: 10pt; color: #666; margin-bottom: 20px;">
|
||||
Schüler/in: {{ data.student_name }} | Klasse: {{ data.student_class }}
|
||||
</div>
|
||||
|
||||
<div class="content">
|
||||
{{ data.content | replace('\\n', '<br>') | safe }}
|
||||
</div>
|
||||
|
||||
{% if data.gfk_principles_applied %}
|
||||
<div style="margin-bottom: 20px;">
|
||||
{% for principle in data.gfk_principles_applied %}
|
||||
<span class="gfk-badge">✓ {{ principle }}</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="signature">
|
||||
<p>Mit freundlichen Grüßen</p>
|
||||
<p style="margin-top: 30px;">
|
||||
{{ data.teacher_name }}
|
||||
{% if data.teacher_title %}<br><span style="font-size: 10pt;">{{ data.teacher_title }}</span>{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{% if data.legal_references %}
|
||||
<div class="legal-references">
|
||||
<strong>Rechtliche Grundlagen:</strong><br>
|
||||
{% for ref in data.legal_references %}
|
||||
• {{ ref.law }} {{ ref.paragraph }}: {{ ref.title }}<br>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div style="font-size: 8pt; color: #999; margin-top: 30px; text-align: center;">
|
||||
Erstellt mit BreakPilot | {{ generated_at }}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def get_certificate_template_html() -> str:
|
||||
"""Inline HTML-Template für Zeugnisse."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Zeugnis - {{ data.student_name }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="certificate-header">
|
||||
{% if data.school_info %}
|
||||
<div class="school-name" style="font-size: 14pt;">{{ data.school_info.name }}</div>
|
||||
{% endif %}
|
||||
<div class="certificate-title">
|
||||
{% if data.certificate_type == 'halbjahr' %}
|
||||
Halbjahreszeugnis
|
||||
{% elif data.certificate_type == 'jahres' %}
|
||||
Jahreszeugnis
|
||||
{% else %}
|
||||
Abschlusszeugnis
|
||||
{% endif %}
|
||||
</div>
|
||||
<div>Schuljahr {{ data.school_year }}</div>
|
||||
</div>
|
||||
|
||||
<div class="student-info">
|
||||
<table style="width: 100%;">
|
||||
<tr>
|
||||
<td><strong>Name:</strong> {{ data.student_name }}</td>
|
||||
<td><strong>Geburtsdatum:</strong> {{ data.student_birthdate }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Klasse:</strong> {{ data.student_class }}</td>
|
||||
<td> </td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<h3>Leistungen</h3>
|
||||
<table class="grades-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width: 70%;">Fach</th>
|
||||
<th style="width: 15%;">Note</th>
|
||||
<th style="width: 15%;">Punkte</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for subject in data.subjects %}
|
||||
<tr>
|
||||
<td>{{ subject.name }}</td>
|
||||
<td class="grade-cell" style="color: {{ subject.grade | grade_color }};">
|
||||
{{ subject.grade }}
|
||||
</td>
|
||||
<td class="grade-cell">{{ subject.points | default('-') }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
{% if data.social_behavior or data.work_behavior %}
|
||||
<h3>Verhalten</h3>
|
||||
<table class="grades-table" style="width: 50%;">
|
||||
{% if data.social_behavior %}
|
||||
<tr>
|
||||
<td>Sozialverhalten</td>
|
||||
<td class="grade-cell">{{ data.social_behavior }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% if data.work_behavior %}
|
||||
<tr>
|
||||
<td>Arbeitsverhalten</td>
|
||||
<td class="grade-cell">{{ data.work_behavior }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
</table>
|
||||
{% endif %}
|
||||
|
||||
<div class="attendance-box">
|
||||
<strong>Versäumte Tage:</strong> {{ data.attendance.days_absent | default(0) }}
|
||||
(davon entschuldigt: {{ data.attendance.days_excused | default(0) }},
|
||||
unentschuldigt: {{ data.attendance.days_unexcused | default(0) }})
|
||||
</div>
|
||||
|
||||
{% if data.remarks %}
|
||||
<div style="margin-bottom: 20px;">
|
||||
<strong>Bemerkungen:</strong><br>
|
||||
{{ data.remarks }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div style="margin-top: 30px;">
|
||||
<strong>Ausgestellt am:</strong> {{ data.issue_date }}
|
||||
</div>
|
||||
|
||||
<div class="signatures-row">
|
||||
<div class="signature-block">
|
||||
<div class="signature-line">{{ data.class_teacher }}</div>
|
||||
<div style="font-size: 9pt;">Klassenlehrer/in</div>
|
||||
</div>
|
||||
<div class="signature-block">
|
||||
<div class="signature-line">{{ data.principal }}</div>
|
||||
<div style="font-size: 9pt;">Schulleiter/in</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="text-align: center; margin-top: 40px;">
|
||||
<div style="font-size: 9pt; color: #666;">Siegel der Schule</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def get_correction_template_html() -> str:
|
||||
"""Inline HTML-Template für Korrektur-Übersichten."""
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Korrektur - {{ data.exam_title }}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="exam-header">
|
||||
<h1 style="margin: 0; color: white;">{{ data.exam_title }}</h1>
|
||||
<div>{{ data.subject }} | {{ data.date }}</div>
|
||||
</div>
|
||||
|
||||
<div class="student-info">
|
||||
<strong>{{ data.student.name }}</strong> | Klasse {{ data.student.class_name }}
|
||||
</div>
|
||||
|
||||
<div class="result-box">
|
||||
<div class="result-grade" style="color: {{ data.grade | grade_color }};">
|
||||
Note: {{ data.grade }}
|
||||
</div>
|
||||
<div class="result-points">
|
||||
{{ data.achieved_points }} von {{ data.max_points }} Punkten
|
||||
({{ data.percentage | round(1) }}%)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h3>Detaillierte Auswertung</h3>
|
||||
<div class="corrections-list">
|
||||
{% for item in data.corrections %}
|
||||
<div class="correction-item">
|
||||
<div class="correction-question">
|
||||
{{ item.question }}
|
||||
</div>
|
||||
{% if item.answer %}
|
||||
<div style="margin: 5px 0; font-style: italic; color: #555;">
|
||||
<strong>Antwort:</strong> {{ item.answer }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div>
|
||||
<strong>Punkte:</strong> {{ item.points }}
|
||||
</div>
|
||||
{% if item.feedback %}
|
||||
<div class="correction-feedback">
|
||||
{{ item.feedback }}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
{% if data.teacher_notes %}
|
||||
<div style="background: #e3f2fd; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
|
||||
<strong>Lehrerkommentar:</strong><br>
|
||||
{{ data.teacher_notes }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data.ai_feedback %}
|
||||
<div style="background: #f3e5f5; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
|
||||
<strong>KI-Feedback:</strong><br>
|
||||
{{ data.ai_feedback }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data.class_average or data.grade_distribution %}
|
||||
<h3>Klassenstatistik</h3>
|
||||
<table class="stats-table">
|
||||
{% if data.class_average %}
|
||||
<tr>
|
||||
<td><strong>Klassendurchschnitt:</strong></td>
|
||||
<td>{{ data.class_average }}</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% if data.grade_distribution %}
|
||||
<tr>
|
||||
<td><strong>Notenverteilung:</strong></td>
|
||||
<td>
|
||||
{% for grade, count in data.grade_distribution.items() %}
|
||||
Note {{ grade }}: {{ count }}x{% if not loop.last %}, {% endif %}
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endif %}
|
||||
</table>
|
||||
{% endif %}
|
||||
|
||||
<div class="signature" style="margin-top: 40px;">
|
||||
<p style="font-size: 9pt; color: #666;">Datum: {{ data.date }}</p>
|
||||
</div>
|
||||
|
||||
<div style="font-size: 8pt; color: #999; margin-top: 30px; text-align: center;">
|
||||
Erstellt mit BreakPilot | {{ generated_at }}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
267
backend-lehrer/teacher_dashboard_analytics.py
Normal file
267
backend-lehrer/teacher_dashboard_analytics.py
Normal file
@@ -0,0 +1,267 @@
|
||||
# ==============================================
|
||||
# Teacher Dashboard - Analytics & Progress Routes
|
||||
# ==============================================
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends, Request
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
|
||||
from teacher_dashboard_models import (
|
||||
UnitAssignmentStatus, TeacherControlSettings,
|
||||
UnitAssignment, StudentUnitProgress, ClassUnitProgress,
|
||||
MisconceptionReport, ClassAnalyticsSummary, ContentResource,
|
||||
get_current_teacher, get_teacher_database,
|
||||
get_classes_for_teacher, get_students_in_class,
|
||||
REQUIRE_AUTH,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["Teacher Dashboard"])
|
||||
|
||||
# Shared in-memory store reference (set from teacher_dashboard_api)
|
||||
_assignments_store: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
|
||||
def set_assignments_store(store: Dict[str, Dict[str, Any]]):
|
||||
"""Share the in-memory assignments store from the main module."""
|
||||
global _assignments_store
|
||||
_assignments_store = store
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Progress & Analytics
|
||||
# ==============================================
|
||||
|
||||
@router.get("/assignments/{assignment_id}/progress", response_model=ClassUnitProgress)
|
||||
async def get_assignment_progress(
|
||||
assignment_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> ClassUnitProgress:
|
||||
"""Get detailed progress for an assignment."""
|
||||
db = await get_teacher_database()
|
||||
assignment = None
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
if not assignment and assignment_id in _assignments_store:
|
||||
assignment = _assignments_store[assignment_id]
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
students = await get_students_in_class(assignment["class_id"])
|
||||
student_progress = []
|
||||
total_completion = 0.0
|
||||
total_precheck = 0.0
|
||||
total_postcheck = 0.0
|
||||
total_time = 0
|
||||
precheck_count = 0
|
||||
postcheck_count = 0
|
||||
started = 0
|
||||
completed = 0
|
||||
|
||||
for student in students:
|
||||
student_id = student.get("id", student.get("student_id"))
|
||||
progress = StudentUnitProgress(
|
||||
student_id=student_id,
|
||||
student_name=student.get("name", f"Student {student_id[:8]}"),
|
||||
status="not_started", completion_rate=0.0, stops_completed=0, total_stops=0,
|
||||
)
|
||||
if db:
|
||||
try:
|
||||
session_data = await db.get_student_unit_session(
|
||||
student_id=student_id, unit_id=assignment["unit_id"]
|
||||
)
|
||||
if session_data:
|
||||
progress.session_id = session_data.get("session_id")
|
||||
progress.status = "completed" if session_data.get("completed_at") else "in_progress"
|
||||
progress.completion_rate = session_data.get("completion_rate", 0.0)
|
||||
progress.precheck_score = session_data.get("precheck_score")
|
||||
progress.postcheck_score = session_data.get("postcheck_score")
|
||||
progress.time_spent_minutes = session_data.get("duration_seconds", 0) // 60
|
||||
progress.last_activity = session_data.get("updated_at")
|
||||
progress.stops_completed = session_data.get("stops_completed", 0)
|
||||
progress.total_stops = session_data.get("total_stops", 0)
|
||||
if progress.precheck_score is not None and progress.postcheck_score is not None:
|
||||
progress.learning_gain = progress.postcheck_score - progress.precheck_score
|
||||
total_completion += progress.completion_rate
|
||||
total_time += progress.time_spent_minutes
|
||||
if progress.precheck_score is not None:
|
||||
total_precheck += progress.precheck_score
|
||||
precheck_count += 1
|
||||
if progress.postcheck_score is not None:
|
||||
total_postcheck += progress.postcheck_score
|
||||
postcheck_count += 1
|
||||
if progress.status != "not_started":
|
||||
started += 1
|
||||
if progress.status == "completed":
|
||||
completed += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get student progress: {e}")
|
||||
student_progress.append(progress)
|
||||
|
||||
total_students = len(students) or 1
|
||||
return ClassUnitProgress(
|
||||
assignment_id=assignment_id, unit_id=assignment["unit_id"],
|
||||
unit_title=f"Unit {assignment['unit_id']}", class_id=assignment["class_id"],
|
||||
class_name=f"Class {assignment['class_id'][:8]}", total_students=len(students),
|
||||
started_count=started, completed_count=completed,
|
||||
avg_completion_rate=total_completion / total_students,
|
||||
avg_precheck_score=total_precheck / precheck_count if precheck_count > 0 else None,
|
||||
avg_postcheck_score=total_postcheck / postcheck_count if postcheck_count > 0 else None,
|
||||
avg_learning_gain=(total_postcheck / postcheck_count - total_precheck / precheck_count)
|
||||
if precheck_count > 0 and postcheck_count > 0 else None,
|
||||
avg_time_minutes=total_time / started if started > 0 else 0,
|
||||
students=student_progress,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/classes/{class_id}/analytics", response_model=ClassAnalyticsSummary)
|
||||
async def get_class_analytics(
|
||||
class_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> ClassAnalyticsSummary:
|
||||
"""Get summary analytics for a class."""
|
||||
db = await get_teacher_database()
|
||||
assignments = []
|
||||
if db:
|
||||
try:
|
||||
assignments = await db.list_assignments(teacher_id=teacher["user_id"], class_id=class_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list assignments: {e}")
|
||||
if not assignments:
|
||||
assignments = [
|
||||
a for a in _assignments_store.values()
|
||||
if a["class_id"] == class_id and a["teacher_id"] == teacher["user_id"]
|
||||
]
|
||||
|
||||
total_units = len(assignments)
|
||||
completed_units = sum(1 for a in assignments if a.get("status") == "completed")
|
||||
active_units = sum(1 for a in assignments if a.get("status") == "active")
|
||||
|
||||
students = await get_students_in_class(class_id)
|
||||
student_scores = {}
|
||||
misconceptions = []
|
||||
if db:
|
||||
try:
|
||||
for student in students:
|
||||
student_id = student.get("id", student.get("student_id"))
|
||||
analytics = await db.get_student_analytics(student_id)
|
||||
if analytics:
|
||||
student_scores[student_id] = {
|
||||
"name": student.get("name", student_id[:8]),
|
||||
"avg_score": analytics.get("avg_postcheck_score", 0),
|
||||
"total_time": analytics.get("total_time_minutes", 0),
|
||||
}
|
||||
misconceptions_data = await db.get_class_misconceptions(class_id)
|
||||
for m in misconceptions_data:
|
||||
misconceptions.append(MisconceptionReport(
|
||||
concept_id=m["concept_id"], concept_label=m["concept_label"],
|
||||
misconception=m["misconception"], affected_students=m["affected_students"],
|
||||
frequency=m["frequency"], unit_id=m["unit_id"], stop_id=m["stop_id"],
|
||||
))
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to aggregate analytics: {e}")
|
||||
|
||||
sorted_students = sorted(student_scores.items(), key=lambda x: x[1]["avg_score"], reverse=True)
|
||||
top_performers = [s[1]["name"] for s in sorted_students[:3]]
|
||||
struggling_students = [s[1]["name"] for s in sorted_students[-3:] if s[1]["avg_score"] < 0.6]
|
||||
total_time = sum(s["total_time"] for s in student_scores.values())
|
||||
avg_scores = [s["avg_score"] for s in student_scores.values() if s["avg_score"] > 0]
|
||||
avg_completion = sum(avg_scores) / len(avg_scores) if avg_scores else 0
|
||||
|
||||
return ClassAnalyticsSummary(
|
||||
class_id=class_id, class_name=f"Klasse {class_id[:8]}",
|
||||
total_units_assigned=total_units, units_completed=completed_units,
|
||||
active_units=active_units, avg_completion_rate=avg_completion,
|
||||
avg_learning_gain=None, total_time_hours=total_time / 60,
|
||||
top_performers=top_performers, struggling_students=struggling_students,
|
||||
common_misconceptions=misconceptions[:5],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/students/{student_id}/progress")
|
||||
async def get_student_progress(
|
||||
student_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> Dict[str, Any]:
|
||||
"""Get detailed progress for a specific student."""
|
||||
db = await get_teacher_database()
|
||||
if db:
|
||||
try:
|
||||
progress = await db.get_student_full_progress(student_id)
|
||||
return progress
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get student progress: {e}")
|
||||
return {
|
||||
"student_id": student_id, "units_attempted": 0, "units_completed": 0,
|
||||
"avg_score": 0.0, "total_time_minutes": 0, "sessions": [],
|
||||
}
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Content Resources
|
||||
# ==============================================
|
||||
|
||||
@router.get("/assignments/{assignment_id}/resources", response_model=List[ContentResource])
|
||||
async def get_assignment_resources(
|
||||
assignment_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher),
|
||||
request: Request = None
|
||||
) -> List[ContentResource]:
|
||||
"""Get generated content resources for an assignment."""
|
||||
db = await get_teacher_database()
|
||||
assignment = None
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
if not assignment and assignment_id in _assignments_store:
|
||||
assignment = _assignments_store[assignment_id]
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
unit_id = assignment["unit_id"]
|
||||
base_url = str(request.base_url).rstrip("/") if request else "http://localhost:8000"
|
||||
return [
|
||||
ContentResource(resource_type="h5p", title=f"{unit_id} - H5P Aktivitaeten",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/h5p",
|
||||
generated_at=datetime.utcnow(), unit_id=unit_id),
|
||||
ContentResource(resource_type="worksheet", title=f"{unit_id} - Arbeitsblatt (HTML)",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/worksheet",
|
||||
generated_at=datetime.utcnow(), unit_id=unit_id),
|
||||
ContentResource(resource_type="pdf", title=f"{unit_id} - Arbeitsblatt (PDF)",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/worksheet.pdf",
|
||||
generated_at=datetime.utcnow(), unit_id=unit_id),
|
||||
]
|
||||
|
||||
|
||||
@router.post("/assignments/{assignment_id}/regenerate-content")
|
||||
async def regenerate_content(
|
||||
assignment_id: str,
|
||||
resource_type: str = Query("all", description="h5p, pdf, or all"),
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> Dict[str, Any]:
|
||||
"""Trigger regeneration of content resources."""
|
||||
db = await get_teacher_database()
|
||||
assignment = None
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
if not assignment and assignment_id in _assignments_store:
|
||||
assignment = _assignments_store[assignment_id]
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
logger.info(f"Content regeneration triggered for {assignment['unit_id']}: {resource_type}")
|
||||
return {
|
||||
"status": "queued", "assignment_id": assignment_id,
|
||||
"unit_id": assignment["unit_id"], "resource_type": resource_type,
|
||||
"message": "Content regeneration has been queued",
|
||||
}
|
||||
@@ -1,245 +1,42 @@
|
||||
# ==============================================
|
||||
# Breakpilot Drive - Teacher Dashboard API
|
||||
# ==============================================
|
||||
# Lehrer-Dashboard fuer Unit-Zuweisung und Analytics:
|
||||
# - Units zu Klassen zuweisen
|
||||
# - Schueler-Fortschritt einsehen
|
||||
# - Klassen-Analytics
|
||||
# - H5P und PDF Content verwalten
|
||||
# - Unit-Einstellungen pro Klasse
|
||||
# Lehrer-Dashboard fuer Unit-Zuweisung und Analytics.
|
||||
#
|
||||
# Split structure:
|
||||
# - teacher_dashboard_models.py: Models, Auth, DB/School helpers
|
||||
# - teacher_dashboard_analytics.py: Progress, analytics, content routes
|
||||
# - teacher_dashboard_api.py: Assignment CRUD, dashboard, units (this file)
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends, Request
|
||||
from pydantic import BaseModel, Field
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
import uuid
|
||||
import os
|
||||
import logging
|
||||
import httpx
|
||||
|
||||
from teacher_dashboard_models import (
|
||||
UnitAssignmentStatus, TeacherControlSettings, AssignUnitRequest,
|
||||
UnitAssignment,
|
||||
get_current_teacher, get_teacher_database,
|
||||
get_classes_for_teacher,
|
||||
REQUIRE_AUTH,
|
||||
)
|
||||
from teacher_dashboard_analytics import (
|
||||
router as analytics_router,
|
||||
set_assignments_store,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Feature flags
|
||||
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
|
||||
REQUIRE_AUTH = os.getenv("TEACHER_REQUIRE_AUTH", "true").lower() == "true"
|
||||
SCHOOL_SERVICE_URL = os.getenv("SCHOOL_SERVICE_URL", "http://school-service:8084")
|
||||
|
||||
router = APIRouter(prefix="/api/teacher", tags=["Teacher Dashboard"])
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Pydantic Models
|
||||
# ==============================================
|
||||
|
||||
class UnitAssignmentStatus(str, Enum):
|
||||
"""Status of a unit assignment"""
|
||||
DRAFT = "draft"
|
||||
ACTIVE = "active"
|
||||
COMPLETED = "completed"
|
||||
ARCHIVED = "archived"
|
||||
|
||||
|
||||
class TeacherControlSettings(BaseModel):
|
||||
"""Unit settings that teachers can configure"""
|
||||
allow_skip: bool = True
|
||||
allow_replay: bool = True
|
||||
max_time_per_stop_sec: int = 90
|
||||
show_hints: bool = True
|
||||
require_precheck: bool = True
|
||||
require_postcheck: bool = True
|
||||
|
||||
|
||||
class AssignUnitRequest(BaseModel):
|
||||
"""Request to assign a unit to a class"""
|
||||
unit_id: str
|
||||
class_id: str
|
||||
due_date: Optional[datetime] = None
|
||||
settings: Optional[TeacherControlSettings] = None
|
||||
notes: Optional[str] = None
|
||||
|
||||
|
||||
class UnitAssignment(BaseModel):
|
||||
"""Unit assignment record"""
|
||||
assignment_id: str
|
||||
unit_id: str
|
||||
class_id: str
|
||||
teacher_id: str
|
||||
status: UnitAssignmentStatus
|
||||
settings: TeacherControlSettings
|
||||
due_date: Optional[datetime] = None
|
||||
notes: Optional[str] = None
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
|
||||
class StudentUnitProgress(BaseModel):
|
||||
"""Progress of a single student on a unit"""
|
||||
student_id: str
|
||||
student_name: str
|
||||
session_id: Optional[str] = None
|
||||
status: str # "not_started", "in_progress", "completed"
|
||||
completion_rate: float = 0.0
|
||||
precheck_score: Optional[float] = None
|
||||
postcheck_score: Optional[float] = None
|
||||
learning_gain: Optional[float] = None
|
||||
time_spent_minutes: int = 0
|
||||
last_activity: Optional[datetime] = None
|
||||
current_stop: Optional[str] = None
|
||||
stops_completed: int = 0
|
||||
total_stops: int = 0
|
||||
|
||||
|
||||
class ClassUnitProgress(BaseModel):
|
||||
"""Overall progress of a class on a unit"""
|
||||
assignment_id: str
|
||||
unit_id: str
|
||||
unit_title: str
|
||||
class_id: str
|
||||
class_name: str
|
||||
total_students: int
|
||||
started_count: int
|
||||
completed_count: int
|
||||
avg_completion_rate: float
|
||||
avg_precheck_score: Optional[float] = None
|
||||
avg_postcheck_score: Optional[float] = None
|
||||
avg_learning_gain: Optional[float] = None
|
||||
avg_time_minutes: float
|
||||
students: List[StudentUnitProgress]
|
||||
|
||||
|
||||
class MisconceptionReport(BaseModel):
|
||||
"""Report of detected misconceptions"""
|
||||
concept_id: str
|
||||
concept_label: str
|
||||
misconception: str
|
||||
affected_students: List[str]
|
||||
frequency: int
|
||||
unit_id: str
|
||||
stop_id: str
|
||||
|
||||
|
||||
class ClassAnalyticsSummary(BaseModel):
|
||||
"""Summary analytics for a class"""
|
||||
class_id: str
|
||||
class_name: str
|
||||
total_units_assigned: int
|
||||
units_completed: int
|
||||
active_units: int
|
||||
avg_completion_rate: float
|
||||
avg_learning_gain: Optional[float]
|
||||
total_time_hours: float
|
||||
top_performers: List[str]
|
||||
struggling_students: List[str]
|
||||
common_misconceptions: List[MisconceptionReport]
|
||||
|
||||
|
||||
class ContentResource(BaseModel):
|
||||
"""Generated content resource"""
|
||||
resource_type: str # "h5p", "pdf", "worksheet"
|
||||
title: str
|
||||
url: str
|
||||
generated_at: datetime
|
||||
unit_id: str
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Auth Dependency
|
||||
# ==============================================
|
||||
|
||||
async def get_current_teacher(request: Request) -> Dict[str, Any]:
|
||||
"""Get current teacher from JWT token."""
|
||||
if not REQUIRE_AUTH:
|
||||
# Dev mode: return demo teacher
|
||||
return {
|
||||
"user_id": "e9484ad9-32ee-4f2b-a4e1-d182e02ccf20",
|
||||
"email": "demo@breakpilot.app",
|
||||
"role": "teacher",
|
||||
"name": "Demo Lehrer"
|
||||
}
|
||||
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
if not auth_header.startswith("Bearer "):
|
||||
raise HTTPException(status_code=401, detail="Missing authorization token")
|
||||
|
||||
try:
|
||||
import jwt
|
||||
token = auth_header[7:]
|
||||
secret = os.getenv("JWT_SECRET", "dev-secret-key")
|
||||
payload = jwt.decode(token, secret, algorithms=["HS256"])
|
||||
|
||||
if payload.get("role") not in ["teacher", "admin"]:
|
||||
raise HTTPException(status_code=403, detail="Teacher or admin role required")
|
||||
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError:
|
||||
raise HTTPException(status_code=401, detail="Token expired")
|
||||
except jwt.InvalidTokenError:
|
||||
raise HTTPException(status_code=401, detail="Invalid token")
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Database Integration
|
||||
# ==============================================
|
||||
|
||||
_teacher_db = None
|
||||
|
||||
async def get_teacher_database():
|
||||
"""Get teacher database instance with lazy initialization."""
|
||||
global _teacher_db
|
||||
if not USE_DATABASE:
|
||||
return None
|
||||
if _teacher_db is None:
|
||||
try:
|
||||
from unit.database import get_teacher_db
|
||||
_teacher_db = await get_teacher_db()
|
||||
logger.info("Teacher database initialized")
|
||||
except ImportError:
|
||||
logger.warning("Teacher database module not available")
|
||||
except Exception as e:
|
||||
logger.warning(f"Teacher database not available: {e}")
|
||||
return _teacher_db
|
||||
|
||||
|
||||
# ==============================================
|
||||
# School Service Integration
|
||||
# ==============================================
|
||||
|
||||
async def get_classes_for_teacher(teacher_id: str) -> List[Dict[str, Any]]:
|
||||
"""Get classes assigned to a teacher from school service."""
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
try:
|
||||
response = await client.get(
|
||||
f"{SCHOOL_SERVICE_URL}/api/v1/school/classes",
|
||||
headers={"X-Teacher-ID": teacher_id}
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get classes from school service: {e}")
|
||||
return []
|
||||
|
||||
|
||||
async def get_students_in_class(class_id: str) -> List[Dict[str, Any]]:
|
||||
"""Get students in a class from school service."""
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
try:
|
||||
response = await client.get(
|
||||
f"{SCHOOL_SERVICE_URL}/api/v1/school/classes/{class_id}/students"
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get students from school service: {e}")
|
||||
return []
|
||||
|
||||
|
||||
# ==============================================
|
||||
# In-Memory Storage (Fallback)
|
||||
# ==============================================
|
||||
|
||||
_assignments_store: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
# Share the store with the analytics module and include its routes
|
||||
set_assignments_store(_assignments_store)
|
||||
router.include_router(analytics_router)
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Unit Assignment
|
||||
@@ -250,28 +47,17 @@ async def assign_unit_to_class(
|
||||
request_data: AssignUnitRequest,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> UnitAssignment:
|
||||
"""
|
||||
Assign a unit to a class.
|
||||
|
||||
Creates an assignment that allows students in the class to play the unit.
|
||||
Teacher can configure settings like skip, replay, time limits.
|
||||
"""
|
||||
"""Assign a unit to a class."""
|
||||
assignment_id = str(uuid.uuid4())
|
||||
now = datetime.utcnow()
|
||||
|
||||
settings = request_data.settings or TeacherControlSettings()
|
||||
|
||||
assignment = {
|
||||
"assignment_id": assignment_id,
|
||||
"unit_id": request_data.unit_id,
|
||||
"class_id": request_data.class_id,
|
||||
"teacher_id": teacher["user_id"],
|
||||
"status": UnitAssignmentStatus.ACTIVE,
|
||||
"settings": settings.model_dump(),
|
||||
"due_date": request_data.due_date,
|
||||
"notes": request_data.notes,
|
||||
"created_at": now,
|
||||
"updated_at": now,
|
||||
"assignment_id": assignment_id, "unit_id": request_data.unit_id,
|
||||
"class_id": request_data.class_id, "teacher_id": teacher["user_id"],
|
||||
"status": UnitAssignmentStatus.ACTIVE, "settings": settings.model_dump(),
|
||||
"due_date": request_data.due_date, "notes": request_data.notes,
|
||||
"created_at": now, "updated_at": now,
|
||||
}
|
||||
|
||||
db = await get_teacher_database()
|
||||
@@ -281,22 +67,15 @@ async def assign_unit_to_class(
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to store assignment: {e}")
|
||||
|
||||
# Fallback: store in memory
|
||||
_assignments_store[assignment_id] = assignment
|
||||
|
||||
logger.info(f"Unit {request_data.unit_id} assigned to class {request_data.class_id}")
|
||||
|
||||
return UnitAssignment(
|
||||
assignment_id=assignment_id,
|
||||
unit_id=request_data.unit_id,
|
||||
class_id=request_data.class_id,
|
||||
teacher_id=teacher["user_id"],
|
||||
status=UnitAssignmentStatus.ACTIVE,
|
||||
settings=settings,
|
||||
due_date=request_data.due_date,
|
||||
notes=request_data.notes,
|
||||
created_at=now,
|
||||
updated_at=now,
|
||||
assignment_id=assignment_id, unit_id=request_data.unit_id,
|
||||
class_id=request_data.class_id, teacher_id=teacher["user_id"],
|
||||
status=UnitAssignmentStatus.ACTIVE, settings=settings,
|
||||
due_date=request_data.due_date, notes=request_data.notes,
|
||||
created_at=now, updated_at=now,
|
||||
)
|
||||
|
||||
|
||||
@@ -306,11 +85,7 @@ async def list_assignments(
|
||||
status: Optional[UnitAssignmentStatus] = Query(None, description="Filter by status"),
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> List[UnitAssignment]:
|
||||
"""
|
||||
List all unit assignments for the teacher.
|
||||
|
||||
Optionally filter by class or status.
|
||||
"""
|
||||
"""List all unit assignments for the teacher."""
|
||||
db = await get_teacher_database()
|
||||
assignments = []
|
||||
|
||||
@@ -325,7 +100,6 @@ async def list_assignments(
|
||||
logger.error(f"Failed to list assignments: {e}")
|
||||
|
||||
if not assignments:
|
||||
# Fallback: filter in-memory store
|
||||
for assignment in _assignments_store.values():
|
||||
if assignment["teacher_id"] != teacher["user_id"]:
|
||||
continue
|
||||
@@ -337,16 +111,11 @@ async def list_assignments(
|
||||
|
||||
return [
|
||||
UnitAssignment(
|
||||
assignment_id=a["assignment_id"],
|
||||
unit_id=a["unit_id"],
|
||||
class_id=a["class_id"],
|
||||
teacher_id=a["teacher_id"],
|
||||
status=a["status"],
|
||||
settings=TeacherControlSettings(**a["settings"]),
|
||||
due_date=a.get("due_date"),
|
||||
notes=a.get("notes"),
|
||||
created_at=a["created_at"],
|
||||
updated_at=a["updated_at"],
|
||||
assignment_id=a["assignment_id"], unit_id=a["unit_id"],
|
||||
class_id=a["class_id"], teacher_id=a["teacher_id"],
|
||||
status=a["status"], settings=TeacherControlSettings(**a["settings"]),
|
||||
due_date=a.get("due_date"), notes=a.get("notes"),
|
||||
created_at=a["created_at"], updated_at=a["updated_at"],
|
||||
)
|
||||
for a in assignments
|
||||
]
|
||||
@@ -359,41 +128,30 @@ async def get_assignment(
|
||||
) -> UnitAssignment:
|
||||
"""Get details of a specific assignment."""
|
||||
db = await get_teacher_database()
|
||||
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
if assignment and assignment["teacher_id"] == teacher["user_id"]:
|
||||
return UnitAssignment(
|
||||
assignment_id=assignment["assignment_id"],
|
||||
unit_id=assignment["unit_id"],
|
||||
class_id=assignment["class_id"],
|
||||
teacher_id=assignment["teacher_id"],
|
||||
assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
|
||||
class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
|
||||
status=assignment["status"],
|
||||
settings=TeacherControlSettings(**assignment["settings"]),
|
||||
due_date=assignment.get("due_date"),
|
||||
notes=assignment.get("notes"),
|
||||
created_at=assignment["created_at"],
|
||||
updated_at=assignment["updated_at"],
|
||||
due_date=assignment.get("due_date"), notes=assignment.get("notes"),
|
||||
created_at=assignment["created_at"], updated_at=assignment["updated_at"],
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
|
||||
# Fallback
|
||||
if assignment_id in _assignments_store:
|
||||
a = _assignments_store[assignment_id]
|
||||
if a["teacher_id"] == teacher["user_id"]:
|
||||
return UnitAssignment(
|
||||
assignment_id=a["assignment_id"],
|
||||
unit_id=a["unit_id"],
|
||||
class_id=a["class_id"],
|
||||
teacher_id=a["teacher_id"],
|
||||
status=a["status"],
|
||||
settings=TeacherControlSettings(**a["settings"]),
|
||||
due_date=a.get("due_date"),
|
||||
notes=a.get("notes"),
|
||||
created_at=a["created_at"],
|
||||
updated_at=a["updated_at"],
|
||||
assignment_id=a["assignment_id"], unit_id=a["unit_id"],
|
||||
class_id=a["class_id"], teacher_id=a["teacher_id"],
|
||||
status=a["status"], settings=TeacherControlSettings(**a["settings"]),
|
||||
due_date=a.get("due_date"), notes=a.get("notes"),
|
||||
created_at=a["created_at"], updated_at=a["updated_at"],
|
||||
)
|
||||
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
@@ -424,7 +182,6 @@ async def update_assignment(
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
# Update fields
|
||||
if settings:
|
||||
assignment["settings"] = settings.model_dump()
|
||||
if status:
|
||||
@@ -444,16 +201,11 @@ async def update_assignment(
|
||||
_assignments_store[assignment_id] = assignment
|
||||
|
||||
return UnitAssignment(
|
||||
assignment_id=assignment["assignment_id"],
|
||||
unit_id=assignment["unit_id"],
|
||||
class_id=assignment["class_id"],
|
||||
teacher_id=assignment["teacher_id"],
|
||||
status=assignment["status"],
|
||||
settings=TeacherControlSettings(**assignment["settings"]),
|
||||
due_date=assignment.get("due_date"),
|
||||
notes=assignment.get("notes"),
|
||||
created_at=assignment["created_at"],
|
||||
updated_at=assignment["updated_at"],
|
||||
assignment_id=assignment["assignment_id"], unit_id=assignment["unit_id"],
|
||||
class_id=assignment["class_id"], teacher_id=assignment["teacher_id"],
|
||||
status=assignment["status"], settings=TeacherControlSettings(**assignment["settings"]),
|
||||
due_date=assignment.get("due_date"), notes=assignment.get("notes"),
|
||||
created_at=assignment["created_at"], updated_at=assignment["updated_at"],
|
||||
)
|
||||
|
||||
|
||||
@@ -464,7 +216,6 @@ async def delete_assignment(
|
||||
) -> Dict[str, str]:
|
||||
"""Delete/archive an assignment."""
|
||||
db = await get_teacher_database()
|
||||
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
@@ -485,339 +236,6 @@ async def delete_assignment(
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Progress & Analytics
|
||||
# ==============================================
|
||||
|
||||
@router.get("/assignments/{assignment_id}/progress", response_model=ClassUnitProgress)
|
||||
async def get_assignment_progress(
|
||||
assignment_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> ClassUnitProgress:
|
||||
"""
|
||||
Get detailed progress for an assignment.
|
||||
|
||||
Shows each student's status, scores, and time spent.
|
||||
"""
|
||||
db = await get_teacher_database()
|
||||
assignment = None
|
||||
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
|
||||
if not assignment and assignment_id in _assignments_store:
|
||||
assignment = _assignments_store[assignment_id]
|
||||
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
# Get students in class
|
||||
students = await get_students_in_class(assignment["class_id"])
|
||||
|
||||
# Get progress for each student
|
||||
student_progress = []
|
||||
total_completion = 0.0
|
||||
total_precheck = 0.0
|
||||
total_postcheck = 0.0
|
||||
total_time = 0
|
||||
precheck_count = 0
|
||||
postcheck_count = 0
|
||||
started = 0
|
||||
completed = 0
|
||||
|
||||
for student in students:
|
||||
student_id = student.get("id", student.get("student_id"))
|
||||
progress = StudentUnitProgress(
|
||||
student_id=student_id,
|
||||
student_name=student.get("name", f"Student {student_id[:8]}"),
|
||||
status="not_started",
|
||||
completion_rate=0.0,
|
||||
stops_completed=0,
|
||||
total_stops=0,
|
||||
)
|
||||
|
||||
if db:
|
||||
try:
|
||||
session_data = await db.get_student_unit_session(
|
||||
student_id=student_id,
|
||||
unit_id=assignment["unit_id"]
|
||||
)
|
||||
if session_data:
|
||||
progress.session_id = session_data.get("session_id")
|
||||
progress.status = "completed" if session_data.get("completed_at") else "in_progress"
|
||||
progress.completion_rate = session_data.get("completion_rate", 0.0)
|
||||
progress.precheck_score = session_data.get("precheck_score")
|
||||
progress.postcheck_score = session_data.get("postcheck_score")
|
||||
progress.time_spent_minutes = session_data.get("duration_seconds", 0) // 60
|
||||
progress.last_activity = session_data.get("updated_at")
|
||||
progress.stops_completed = session_data.get("stops_completed", 0)
|
||||
progress.total_stops = session_data.get("total_stops", 0)
|
||||
|
||||
if progress.precheck_score is not None and progress.postcheck_score is not None:
|
||||
progress.learning_gain = progress.postcheck_score - progress.precheck_score
|
||||
|
||||
# Aggregate stats
|
||||
total_completion += progress.completion_rate
|
||||
total_time += progress.time_spent_minutes
|
||||
if progress.precheck_score is not None:
|
||||
total_precheck += progress.precheck_score
|
||||
precheck_count += 1
|
||||
if progress.postcheck_score is not None:
|
||||
total_postcheck += progress.postcheck_score
|
||||
postcheck_count += 1
|
||||
if progress.status != "not_started":
|
||||
started += 1
|
||||
if progress.status == "completed":
|
||||
completed += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get student progress: {e}")
|
||||
|
||||
student_progress.append(progress)
|
||||
|
||||
total_students = len(students) or 1 # Avoid division by zero
|
||||
|
||||
return ClassUnitProgress(
|
||||
assignment_id=assignment_id,
|
||||
unit_id=assignment["unit_id"],
|
||||
unit_title=f"Unit {assignment['unit_id']}", # Would load from unit definition
|
||||
class_id=assignment["class_id"],
|
||||
class_name=f"Class {assignment['class_id'][:8]}", # Would load from school service
|
||||
total_students=len(students),
|
||||
started_count=started,
|
||||
completed_count=completed,
|
||||
avg_completion_rate=total_completion / total_students,
|
||||
avg_precheck_score=total_precheck / precheck_count if precheck_count > 0 else None,
|
||||
avg_postcheck_score=total_postcheck / postcheck_count if postcheck_count > 0 else None,
|
||||
avg_learning_gain=(total_postcheck / postcheck_count - total_precheck / precheck_count)
|
||||
if precheck_count > 0 and postcheck_count > 0 else None,
|
||||
avg_time_minutes=total_time / started if started > 0 else 0,
|
||||
students=student_progress,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/classes/{class_id}/analytics", response_model=ClassAnalyticsSummary)
|
||||
async def get_class_analytics(
|
||||
class_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> ClassAnalyticsSummary:
|
||||
"""
|
||||
Get summary analytics for a class.
|
||||
|
||||
Includes all unit assignments, overall progress, and common misconceptions.
|
||||
"""
|
||||
db = await get_teacher_database()
|
||||
|
||||
# Get all assignments for this class
|
||||
assignments = []
|
||||
if db:
|
||||
try:
|
||||
assignments = await db.list_assignments(
|
||||
teacher_id=teacher["user_id"],
|
||||
class_id=class_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list assignments: {e}")
|
||||
|
||||
if not assignments:
|
||||
assignments = [
|
||||
a for a in _assignments_store.values()
|
||||
if a["class_id"] == class_id and a["teacher_id"] == teacher["user_id"]
|
||||
]
|
||||
|
||||
total_units = len(assignments)
|
||||
completed_units = sum(1 for a in assignments if a.get("status") == "completed")
|
||||
active_units = sum(1 for a in assignments if a.get("status") == "active")
|
||||
|
||||
# Aggregate student performance
|
||||
students = await get_students_in_class(class_id)
|
||||
student_scores = {}
|
||||
misconceptions = []
|
||||
|
||||
if db:
|
||||
try:
|
||||
for student in students:
|
||||
student_id = student.get("id", student.get("student_id"))
|
||||
analytics = await db.get_student_analytics(student_id)
|
||||
if analytics:
|
||||
student_scores[student_id] = {
|
||||
"name": student.get("name", student_id[:8]),
|
||||
"avg_score": analytics.get("avg_postcheck_score", 0),
|
||||
"total_time": analytics.get("total_time_minutes", 0),
|
||||
}
|
||||
|
||||
# Get common misconceptions
|
||||
misconceptions_data = await db.get_class_misconceptions(class_id)
|
||||
for m in misconceptions_data:
|
||||
misconceptions.append(MisconceptionReport(
|
||||
concept_id=m["concept_id"],
|
||||
concept_label=m["concept_label"],
|
||||
misconception=m["misconception"],
|
||||
affected_students=m["affected_students"],
|
||||
frequency=m["frequency"],
|
||||
unit_id=m["unit_id"],
|
||||
stop_id=m["stop_id"],
|
||||
))
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to aggregate analytics: {e}")
|
||||
|
||||
# Identify top and struggling students
|
||||
sorted_students = sorted(
|
||||
student_scores.items(),
|
||||
key=lambda x: x[1]["avg_score"],
|
||||
reverse=True
|
||||
)
|
||||
top_performers = [s[1]["name"] for s in sorted_students[:3]]
|
||||
struggling_students = [s[1]["name"] for s in sorted_students[-3:] if s[1]["avg_score"] < 0.6]
|
||||
|
||||
total_time = sum(s["total_time"] for s in student_scores.values())
|
||||
avg_scores = [s["avg_score"] for s in student_scores.values() if s["avg_score"] > 0]
|
||||
avg_completion = sum(avg_scores) / len(avg_scores) if avg_scores else 0
|
||||
|
||||
return ClassAnalyticsSummary(
|
||||
class_id=class_id,
|
||||
class_name=f"Klasse {class_id[:8]}",
|
||||
total_units_assigned=total_units,
|
||||
units_completed=completed_units,
|
||||
active_units=active_units,
|
||||
avg_completion_rate=avg_completion,
|
||||
avg_learning_gain=None, # Would calculate from pre/post scores
|
||||
total_time_hours=total_time / 60,
|
||||
top_performers=top_performers,
|
||||
struggling_students=struggling_students,
|
||||
common_misconceptions=misconceptions[:5],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/students/{student_id}/progress")
|
||||
async def get_student_progress(
|
||||
student_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Get detailed progress for a specific student.
|
||||
|
||||
Shows all units attempted and their performance.
|
||||
"""
|
||||
db = await get_teacher_database()
|
||||
|
||||
if db:
|
||||
try:
|
||||
progress = await db.get_student_full_progress(student_id)
|
||||
return progress
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get student progress: {e}")
|
||||
|
||||
return {
|
||||
"student_id": student_id,
|
||||
"units_attempted": 0,
|
||||
"units_completed": 0,
|
||||
"avg_score": 0.0,
|
||||
"total_time_minutes": 0,
|
||||
"sessions": [],
|
||||
}
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Content Resources
|
||||
# ==============================================
|
||||
|
||||
@router.get("/assignments/{assignment_id}/resources", response_model=List[ContentResource])
|
||||
async def get_assignment_resources(
|
||||
assignment_id: str,
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher),
|
||||
request: Request = None
|
||||
) -> List[ContentResource]:
|
||||
"""
|
||||
Get generated content resources for an assignment.
|
||||
|
||||
Returns links to H5P activities and PDF worksheets.
|
||||
"""
|
||||
db = await get_teacher_database()
|
||||
assignment = None
|
||||
|
||||
if db:
|
||||
try:
|
||||
assignment = await db.get_assignment(assignment_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get assignment: {e}")
|
||||
|
||||
if not assignment and assignment_id in _assignments_store:
|
||||
assignment = _assignments_store[assignment_id]
|
||||
|
||||
if not assignment or assignment["teacher_id"] != teacher["user_id"]:
|
||||
raise HTTPException(status_code=404, detail="Assignment not found")
|
||||
|
||||
unit_id = assignment["unit_id"]
|
||||
base_url = str(request.base_url).rstrip("/") if request else "http://localhost:8000"
|
||||
|
||||
resources = [
|
||||
ContentResource(
|
||||
resource_type="h5p",
|
||||
title=f"{unit_id} - H5P Aktivitaeten",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/h5p",
|
||||
generated_at=datetime.utcnow(),
|
||||
unit_id=unit_id,
|
||||
),
|
||||
ContentResource(
|
||||
resource_type="worksheet",
|
||||
title=f"{unit_id} - Arbeitsblatt (HTML)",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/worksheet",
|
||||
generated_at=datetime.utcnow(),
|
||||
unit_id=unit_id,
|
||||
),
|
||||
ContentResource(
|
||||
resource_type="pdf",
|
||||
title=f"{unit_id} - Arbeitsblatt (PDF)",
|
||||
url=f"{base_url}/api/units/content/{unit_id}/worksheet.pdf",
|
||||
generated_at=datetime.utcnow(),
|
||||
unit_id=unit_id,
|
||||
),
|
||||
]
|
||||
|
||||
return resources
|
||||
|
||||
|
||||
@router.post("/assignments/{assignment_id}/regenerate-content")
async def regenerate_content(
    assignment_id: str,
    resource_type: str = Query("all", description="h5p, pdf, or all"),
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """
    Trigger regeneration of content resources.

    Useful after updating unit definitions.
    """
    db = await get_teacher_database()

    assignment = None
    if db:
        try:
            assignment = await db.get_assignment(assignment_id)
        except Exception as e:
            logger.error(f"Failed to get assignment: {e}")

    # Fall back to the in-memory store if the DB gave us nothing.
    if not assignment:
        assignment = _assignments_store.get(assignment_id)

    if not assignment or assignment["teacher_id"] != teacher["user_id"]:
        raise HTTPException(status_code=404, detail="Assignment not found")

    # In production, this would trigger async job to regenerate content
    unit_id = assignment["unit_id"]
    logger.info(f"Content regeneration triggered for {unit_id}: {resource_type}")

    return {
        "status": "queued",
        "assignment_id": assignment_id,
        "unit_id": unit_id,
        "resource_type": resource_type,
        "message": "Content regeneration has been queued",
    }
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Available Units
|
||||
# ==============================================
|
||||
@@ -829,51 +247,30 @@ async def list_available_units(
|
||||
locale: str = Query("de-DE", description="Locale"),
|
||||
teacher: Dict[str, Any] = Depends(get_current_teacher)
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
List all available units for assignment.
|
||||
|
||||
Teachers see all published units matching their criteria.
|
||||
"""
|
||||
"""List all available units for assignment."""
|
||||
db = await get_teacher_database()
|
||||
|
||||
if db:
|
||||
try:
|
||||
units = await db.list_available_units(
|
||||
grade=grade,
|
||||
template=template,
|
||||
locale=locale
|
||||
)
|
||||
units = await db.list_available_units(grade=grade, template=template, locale=locale)
|
||||
return units
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list units: {e}")
|
||||
|
||||
# Fallback: return demo units
|
||||
return [
|
||||
{
|
||||
"unit_id": "bio_eye_lightpath_v1",
|
||||
"title": "Auge - Lichtstrahl-Flug",
|
||||
"template": "flight_path",
|
||||
"grade_band": ["5", "6", "7"],
|
||||
"duration_minutes": 8,
|
||||
"difficulty": "base",
|
||||
"unit_id": "bio_eye_lightpath_v1", "title": "Auge - Lichtstrahl-Flug",
|
||||
"template": "flight_path", "grade_band": ["5", "6", "7"],
|
||||
"duration_minutes": 8, "difficulty": "base",
|
||||
"description": "Reise durch das Auge und folge dem Lichtstrahl",
|
||||
"learning_objectives": [
|
||||
"Verstehen des Lichtwegs durch das Auge",
|
||||
"Funktionen der Augenbestandteile benennen",
|
||||
],
|
||||
"learning_objectives": ["Verstehen des Lichtwegs durch das Auge",
|
||||
"Funktionen der Augenbestandteile benennen"],
|
||||
},
|
||||
{
|
||||
"unit_id": "math_pizza_equivalence_v1",
|
||||
"title": "Pizza-Boxenstopp - Brueche und Prozent",
|
||||
"template": "station_loop",
|
||||
"grade_band": ["5", "6"],
|
||||
"duration_minutes": 10,
|
||||
"difficulty": "base",
|
||||
"template": "station_loop", "grade_band": ["5", "6"],
|
||||
"duration_minutes": 10, "difficulty": "base",
|
||||
"description": "Entdecke die Verbindung zwischen Bruechen, Dezimalzahlen und Prozent",
|
||||
"learning_objectives": [
|
||||
"Brueche in Prozent umrechnen",
|
||||
"Aequivalenzen erkennen",
|
||||
],
|
||||
"learning_objectives": ["Brueche in Prozent umrechnen", "Aequivalenzen erkennen"],
|
||||
},
|
||||
]
|
||||
|
||||
@@ -886,54 +283,38 @@ async def list_available_units(
|
||||
async def get_dashboard(
    teacher: Dict[str, Any] = Depends(get_current_teacher)
) -> Dict[str, Any]:
    """Get teacher dashboard overview.

    Summary of the teacher's classes, active assignments, and alerts.
    """
    db = await get_teacher_database()

    # Classes come from the external school service (best-effort, may be []).
    classes = await get_classes_for_teacher(teacher["user_id"])

    # Active assignments: DB first, in-memory store as fallback.
    active_assignments = []
    if db:
        try:
            active_assignments = await db.list_assignments(
                teacher_id=teacher["user_id"], status="active"
            )
        except Exception as e:
            logger.error(f"Failed to list assignments: {e}")

    if not active_assignments:
        active_assignments = [
            a for a in _assignments_store.values()
            if a["teacher_id"] == teacher["user_id"] and a.get("status") == "active"
        ]

    # Alerts: assignments due within the next 2 days (includes already-overdue
    # ones, since any past due_date also compares below the cutoff).
    # Cutoff computed once; the original recomputed utcnow() every iteration,
    # so the threshold could drift mid-loop.
    due_soon_cutoff = datetime.utcnow() + timedelta(days=2)
    alerts = []
    for assignment in active_assignments:
        if assignment.get("due_date") and assignment["due_date"] < due_soon_cutoff:
            alerts.append({
                "type": "due_soon", "assignment_id": assignment["assignment_id"],
                "message": "Zuweisung endet in weniger als 2 Tagen",
            })

    return {
        "teacher": {"id": teacher["user_id"], "name": teacher.get("name", "Lehrer"),
                    "email": teacher.get("email")},
        "classes": len(classes), "active_assignments": len(active_assignments),
        "total_students": sum(c.get("student_count", 0) for c in classes),
        "alerts": alerts, "recent_activity": [],  # recent_activity: not yet loaded
    }
|
||||
|
||||
|
||||
async def health_check() -> Dict[str, Any]:
    """Health check for teacher dashboard API."""
    db = await get_teacher_database()

    return {
        "status": "healthy", "service": "teacher-dashboard",
        # Report which persistence backend is actually in effect.
        "database": "connected" if db else "in-memory",
        "auth_required": REQUIRE_AUTH,
    }
|
||||
|
||||
226
backend-lehrer/teacher_dashboard_models.py
Normal file
226
backend-lehrer/teacher_dashboard_models.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""
Teacher Dashboard - Pydantic Models, Auth Dependency, and Service Helpers.

Shared by the teacher dashboard API routes: request/response models, the
JWT auth dependency, lazy database access, and school-service lookups.
"""

import os
import logging
from datetime import datetime
from typing import List, Optional, Dict, Any
from enum import Enum

from fastapi import HTTPException, Request
from pydantic import BaseModel
import httpx

logger = logging.getLogger(__name__)

# Feature flags (read once from the environment at import time).
# When false, get_teacher_database() always returns None (in-memory fallback).
USE_DATABASE = os.getenv("GAME_USE_DATABASE", "true").lower() == "true"
# When false, get_current_teacher() returns a fixed demo-teacher payload.
REQUIRE_AUTH = os.getenv("TEACHER_REQUIRE_AUTH", "true").lower() == "true"
# Base URL of the school service used for class/student lookups.
SCHOOL_SERVICE_URL = os.getenv("SCHOOL_SERVICE_URL", "http://school-service:8084")
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Pydantic Models
|
||||
# ==============================================
|
||||
|
||||
class UnitAssignmentStatus(str, Enum):
    """Status of a unit assignment.

    Subclasses ``str`` so members serialize as their plain string values
    in JSON responses and compare equal to those strings.
    """
    DRAFT = "draft"
    ACTIVE = "active"      # the status filtered on by dashboard/assignment queries
    COMPLETED = "completed"
    ARCHIVED = "archived"
|
||||
|
||||
|
||||
class TeacherControlSettings(BaseModel):
    """Unit settings that teachers can configure.

    All boolean toggles default to enabled. Field semantics are enforced
    by the unit player, not in this module.
    """
    allow_skip: bool = True
    allow_replay: bool = True
    max_time_per_stop_sec: int = 90  # per-stop time budget, in seconds
    show_hints: bool = True
    require_precheck: bool = True
    require_postcheck: bool = True
|
||||
|
||||
|
||||
class AssignUnitRequest(BaseModel):
    """Request body for assigning a unit to a class.

    Only unit_id and class_id are required; the rest are optional extras.
    """
    unit_id: str
    class_id: str
    due_date: Optional[datetime] = None  # no deadline when omitted
    settings: Optional[TeacherControlSettings] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class UnitAssignment(BaseModel):
    """Unit assignment record: links one unit to one class, owned by a teacher."""
    assignment_id: str
    unit_id: str
    class_id: str
    teacher_id: str
    status: UnitAssignmentStatus
    settings: TeacherControlSettings
    due_date: Optional[datetime] = None  # no deadline when None
    notes: Optional[str] = None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class StudentUnitProgress(BaseModel):
    """Progress of a single student on a unit."""
    student_id: str
    student_name: str
    session_id: Optional[str] = None
    status: str  # "not_started", "in_progress", "completed"
    completion_rate: float = 0.0  # NOTE(review): presumably a 0.0-1.0 fraction — confirm
    precheck_score: Optional[float] = None
    postcheck_score: Optional[float] = None
    learning_gain: Optional[float] = None  # presumably derived from pre/post scores — confirm
    time_spent_minutes: int = 0
    last_activity: Optional[datetime] = None
    current_stop: Optional[str] = None
    stops_completed: int = 0
    total_stops: int = 0
|
||||
|
||||
|
||||
class ClassUnitProgress(BaseModel):
    """Overall progress of a class on a unit.

    Aggregates per-student StudentUnitProgress rows for one assignment.
    """
    assignment_id: str
    unit_id: str
    unit_title: str
    class_id: str
    class_name: str
    total_students: int
    started_count: int
    completed_count: int
    avg_completion_rate: float
    avg_precheck_score: Optional[float] = None
    avg_postcheck_score: Optional[float] = None
    avg_learning_gain: Optional[float] = None
    avg_time_minutes: float
    students: List[StudentUnitProgress]  # one entry per student in the class
|
||||
|
||||
|
||||
class MisconceptionReport(BaseModel):
    """Report of detected misconceptions for one concept at one unit stop."""
    concept_id: str
    concept_label: str
    misconception: str
    affected_students: List[str]  # presumably student IDs — confirm against analytics producer
    frequency: int
    unit_id: str
    stop_id: str
|
||||
|
||||
|
||||
class ClassAnalyticsSummary(BaseModel):
    """Summary analytics for a class."""
    class_id: str
    class_name: str
    total_units_assigned: int
    units_completed: int
    active_units: int
    avg_completion_rate: float
    avg_learning_gain: Optional[float]  # no default: must be passed explicitly, may be None
    total_time_hours: float
    top_performers: List[str]
    struggling_students: List[str]
    common_misconceptions: List[MisconceptionReport]
common_misconceptions: List[MisconceptionReport]
|
||||
|
||||
|
||||
class ContentResource(BaseModel):
    """Generated content resource (a downloadable/linkable artifact for a unit)."""
    resource_type: str  # "h5p", "pdf", "worksheet"
    title: str
    url: str
    generated_at: datetime
    unit_id: str
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Auth Dependency
|
||||
# ==============================================
|
||||
|
||||
async def get_current_teacher(request: Request) -> Dict[str, Any]:
    """FastAPI dependency: resolve the current teacher from a JWT bearer token.

    Returns the decoded token payload, which must carry role "teacher" or
    "admin". When REQUIRE_AUTH is disabled, returns a fixed demo-teacher
    payload instead and never touches the request headers.

    Raises:
        HTTPException 401: missing, expired, or invalid token.
        HTTPException 403: valid token but wrong role.
    """
    if not REQUIRE_AUTH:
        # Demo mode: fixed identity, no token required.
        return {
            "user_id": "e9484ad9-32ee-4f2b-a4e1-d182e02ccf20",
            "email": "demo@breakpilot.app",
            "role": "teacher",
            "name": "Demo Lehrer"
        }

    auth_header = request.headers.get("Authorization", "")
    if not auth_header.startswith("Bearer "):
        raise HTTPException(status_code=401, detail="Missing authorization token")

    # Imported lazily (so demo mode works without PyJWT) but OUTSIDE the try:
    # the original imported inside the try, so a missing PyJWT raised a
    # NameError while matching the `except jwt...` clauses below.
    import jwt

    token = auth_header[7:]  # strip the "Bearer " prefix
    # SECURITY: falls back to a well-known dev secret when JWT_SECRET is
    # unset — must be overridden in any real deployment.
    secret = os.getenv("JWT_SECRET", "dev-secret-key")

    try:
        payload = jwt.decode(token, secret, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")

    if payload.get("role") not in ["teacher", "admin"]:
        raise HTTPException(status_code=403, detail="Teacher or admin role required")

    return payload
|
||||
|
||||
|
||||
# ==============================================
|
||||
# Database Integration
|
||||
# ==============================================
|
||||
|
||||
_teacher_db = None
|
||||
|
||||
|
||||
async def get_teacher_database():
    """Return the shared teacher-database instance, creating it on first use.

    Returns None when GAME_USE_DATABASE is off, or when initialization
    fails (the failure is logged and retried on the next call).
    """
    global _teacher_db

    if not USE_DATABASE:
        return None
    if _teacher_db is not None:
        return _teacher_db

    try:
        from unit.database import get_teacher_db
        _teacher_db = await get_teacher_db()
        logger.info("Teacher database initialized")
    except ImportError:
        logger.warning("Teacher database module not available")
    except Exception as e:
        logger.warning(f"Teacher database not available: {e}")
    return _teacher_db
|
||||
|
||||
|
||||
# ==============================================
|
||||
# School Service Integration
|
||||
# ==============================================
|
||||
|
||||
async def get_classes_for_teacher(teacher_id: str) -> List[Dict[str, Any]]:
    """Fetch the teacher's classes from the school service.

    Best-effort: any error or non-200 response yields an empty list.
    """
    endpoint = f"{SCHOOL_SERVICE_URL}/api/v1/school/classes"
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(endpoint, headers={"X-Teacher-ID": teacher_id})
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get classes from school service: {e}")
    return []
|
||||
|
||||
|
||||
async def get_students_in_class(class_id: str) -> List[Dict[str, Any]]:
    """Fetch the students of a class from the school service.

    Best-effort: any error or non-200 response yields an empty list.
    """
    endpoint = f"{SCHOOL_SERVICE_URL}/api/v1/school/classes/{class_id}/students"
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(endpoint)
            if response.status_code == 200:
                return response.json()
        except Exception as e:
            logger.error(f"Failed to get students from school service: {e}")
    return []
|
||||
Reference in New Issue
Block a user