klausur-service (7 monoliths): - grid_editor_helpers.py (1,737 → 5 files: columns, filters, headers, zones) - cv_cell_grid.py (1,675 → 7 files: build, legacy, streaming, merge, vocab) - worksheet_editor_api.py (1,305 → 4 files: models, AI, reconstruct, routes) - legal_corpus_ingestion.py (1,280 → 3 files: registry, chunking, ingestion) - cv_review.py (1,248 → 4 files: pipeline, spell, LLM, barrel) - cv_preprocessing.py (1,166 → 3 files: deskew, dewarp, barrel) - rbac.py, admin_api.py, routes/eh.py remain (next batch) backend-lehrer (1 monolith): - classroom_engine/repository.py (1,705 → 7 files by domain) All re-export barrels preserve backward compatibility. Zero import errors verified. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
316 lines
11 KiB
Python
316 lines
11 KiB
Python
"""
|
|
Reflection & Analytics Repositories.
|
|
|
|
CRUD-Operationen fuer Lesson-Reflections und Analytics-Abfragen (Phase 5).
|
|
"""
|
|
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any

from sqlalchemy.orm import Session as DBSession

from .db_models import LessonSessionDB, LessonPhaseEnum, LessonReflectionDB
from .analytics import (
    LessonReflection, SessionSummary, TeacherAnalytics, AnalyticsCalculator,
)
|
|
|
|
|
|
class ReflectionRepository:
    """Repository for LessonReflection CRUD operations."""

    def __init__(self, db: DBSession):
        self.db = db

    # ==================== INTERNAL ====================

    def _commit_and_refresh(self, db_reflection: LessonReflectionDB) -> LessonReflectionDB:
        """Commit pending changes and reload the row so DB-generated fields
        (e.g. created_at/updated_at) are populated on the returned object."""
        self.db.commit()
        self.db.refresh(db_reflection)
        return db_reflection

    # ==================== CREATE ====================

    def create(self, reflection: LessonReflection) -> LessonReflectionDB:
        """Persist a new reflection and return the stored DB row."""
        db_reflection = LessonReflectionDB(
            id=reflection.reflection_id,
            session_id=reflection.session_id,
            teacher_id=reflection.teacher_id,
            notes=reflection.notes,
            overall_rating=reflection.overall_rating,
            what_worked=reflection.what_worked,
            improvements=reflection.improvements,
            notes_for_next_lesson=reflection.notes_for_next_lesson,
        )
        self.db.add(db_reflection)
        return self._commit_and_refresh(db_reflection)

    # ==================== READ ====================

    def get_by_id(self, reflection_id: str) -> Optional[LessonReflectionDB]:
        """Fetch a reflection by primary key, or None if not found."""
        return self.db.query(LessonReflectionDB).filter(
            LessonReflectionDB.id == reflection_id
        ).first()

    def get_by_session(self, session_id: str) -> Optional[LessonReflectionDB]:
        """Fetch the reflection attached to a session, or None if not found."""
        return self.db.query(LessonReflectionDB).filter(
            LessonReflectionDB.session_id == session_id
        ).first()

    def get_by_teacher(
        self,
        teacher_id: str,
        limit: int = 20,
        offset: int = 0
    ) -> List[LessonReflectionDB]:
        """Fetch a teacher's reflections, newest first, with offset/limit
        pagination."""
        return self.db.query(LessonReflectionDB).filter(
            LessonReflectionDB.teacher_id == teacher_id
        ).order_by(
            LessonReflectionDB.created_at.desc()
        ).offset(offset).limit(limit).all()

    # ==================== UPDATE ====================

    def update(self, reflection: LessonReflection) -> Optional[LessonReflectionDB]:
        """Update the editable fields of an existing reflection.

        Returns:
            The refreshed DB row, or None if no row matches
            ``reflection.reflection_id``.
        """
        db_reflection = self.get_by_id(reflection.reflection_id)
        if not db_reflection:
            return None

        db_reflection.notes = reflection.notes
        db_reflection.overall_rating = reflection.overall_rating
        db_reflection.what_worked = reflection.what_worked
        db_reflection.improvements = reflection.improvements
        db_reflection.notes_for_next_lesson = reflection.notes_for_next_lesson

        return self._commit_and_refresh(db_reflection)

    # ==================== DELETE ====================

    def delete(self, reflection_id: str) -> bool:
        """Delete a reflection by ID.

        Returns:
            True if a row was removed, False if no row matched.
        """
        db_reflection = self.get_by_id(reflection_id)
        if not db_reflection:
            return False

        self.db.delete(db_reflection)
        self.db.commit()
        return True

    # ==================== CONVERSION ====================

    def to_dataclass(self, db_reflection: LessonReflectionDB) -> LessonReflection:
        """Convert a DB row to the LessonReflection dataclass.

        Nullable text/list columns are normalized to "" / [] so callers
        never see None for those fields.
        """
        return LessonReflection(
            reflection_id=db_reflection.id,
            session_id=db_reflection.session_id,
            teacher_id=db_reflection.teacher_id,
            notes=db_reflection.notes or "",
            overall_rating=db_reflection.overall_rating,
            what_worked=db_reflection.what_worked or [],
            improvements=db_reflection.improvements or [],
            notes_for_next_lesson=db_reflection.notes_for_next_lesson or "",
            created_at=db_reflection.created_at,
            updated_at=db_reflection.updated_at,
        )
|
class AnalyticsRepository:
    """Repository for analytics queries over lesson sessions."""

    def __init__(self, db: DBSession):
        self.db = db

    # ==================== INTERNAL ====================

    @staticmethod
    def _session_to_dict(
        db_session: LessonSessionDB,
        include_history: bool = False,
    ) -> Dict[str, Any]:
        """Flatten a session row into the dict shape AnalyticsCalculator expects.

        Args:
            db_session: The session row to flatten.
            include_history: Also include the ``phase_history`` JSON list
                (needed by the teacher-level aggregation).
        """
        data: Dict[str, Any] = {
            "session_id": db_session.id,
            "teacher_id": db_session.teacher_id,
            "class_id": db_session.class_id,
            "subject": db_session.subject,
            "topic": db_session.topic,
            "lesson_started_at": db_session.lesson_started_at,
            "lesson_ended_at": db_session.lesson_ended_at,
            "phase_durations": db_session.phase_durations or {},
        }
        if include_history:
            data["phase_history"] = db_session.phase_history or []
        return data

    def _ended_sessions(self, teacher_id: str, limit: int) -> List[LessonSessionDB]:
        """Return a teacher's most recently ended sessions, newest first."""
        return self.db.query(LessonSessionDB).filter(
            LessonSessionDB.teacher_id == teacher_id,
            LessonSessionDB.current_phase == LessonPhaseEnum.ENDED
        ).order_by(
            LessonSessionDB.lesson_ended_at.desc()
        ).limit(limit).all()

    # ==================== QUERIES ====================

    def get_session_summary(self, session_id: str) -> Optional[SessionSummary]:
        """Compute the summary of a completed session.

        Args:
            session_id: ID of the session.

        Returns:
            SessionSummary, or None if the session was not found.
        """
        db_session = self.db.query(LessonSessionDB).filter(
            LessonSessionDB.id == session_id
        ).first()

        if not db_session:
            return None

        # Phase history is stored as JSON on the session row.
        phase_history = db_session.phase_history or []

        return AnalyticsCalculator.calculate_session_summary(
            self._session_to_dict(db_session), phase_history
        )

    def get_teacher_analytics(
        self,
        teacher_id: str,
        period_start: Optional[datetime] = None,
        period_end: Optional[datetime] = None
    ) -> TeacherAnalytics:
        """Compute aggregated statistics for a teacher.

        Args:
            teacher_id: ID of the teacher.
            period_start: Start of the period (default: 30 days back).
            period_end: End of the period (default: now).

        Returns:
            TeacherAnalytics with aggregated statistics.
        """
        # NOTE(review): naive UTC timestamps; presumably lesson_started_at is
        # also stored naive-UTC -- confirm before moving to timezone-aware
        # datetimes.
        if not period_end:
            period_end = datetime.utcnow()
        if not period_start:
            period_start = period_end - timedelta(days=30)

        # All sessions that started within the period.
        sessions_query = self.db.query(LessonSessionDB).filter(
            LessonSessionDB.teacher_id == teacher_id,
            LessonSessionDB.lesson_started_at >= period_start,
            LessonSessionDB.lesson_started_at <= period_end
        ).all()

        sessions_data = [
            self._session_to_dict(db_session, include_history=True)
            for db_session in sessions_query
        ]

        return AnalyticsCalculator.calculate_teacher_analytics(
            sessions_data, period_start, period_end
        )

    def get_phase_duration_trends(
        self,
        teacher_id: str,
        phase: str,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """Return duration trends for one lesson phase.

        Args:
            teacher_id: ID of the teacher.
            phase: Phase ID (einstieg, erarbeitung, etc.).
            limit: Maximum number of sessions to inspect.

        Returns:
            Chronologically ordered data points
            [{date, session_id, subject, planned_seconds, actual_seconds,
              difference_seconds}].
        """
        trends: List[Dict[str, Any]] = []
        for db_session in self._ended_sessions(teacher_id, limit):
            for entry in db_session.phase_history or []:
                if entry.get("phase") != phase:
                    continue
                # Planned durations are stored in minutes; convert to seconds.
                planned = (db_session.phase_durations or {}).get(phase, 0) * 60
                actual = entry.get("duration_seconds", 0) or 0
                trends.append({
                    "date": db_session.lesson_started_at.isoformat() if db_session.lesson_started_at else None,
                    "session_id": db_session.id,
                    "subject": db_session.subject,
                    "planned_seconds": planned,
                    "actual_seconds": actual,
                    "difference_seconds": actual - planned,
                })
                # Only the first matching phase entry per session counts.
                break

        # Query is newest-first; reverse for a chronological series.
        return list(reversed(trends))

    def get_overtime_analysis(
        self,
        teacher_id: str,
        limit: int = 30
    ) -> Dict[str, Any]:
        """Analyze overtime patterns per lesson phase.

        Args:
            teacher_id: ID of the teacher.
            limit: Number of sessions to analyze.

        Returns:
            Dict with overtime statistics per phase.
        """
        phase_overtime: Dict[str, List[int]] = {
            "einstieg": [],
            "erarbeitung": [],
            "sicherung": [],
            "transfer": [],
            "reflexion": [],
        }

        for db_session in self._ended_sessions(teacher_id, limit):
            phase_durations = db_session.phase_durations or {}
            for entry in db_session.phase_history or []:
                phase = entry.get("phase", "")
                if phase in phase_overtime:
                    # Planned minutes -> seconds; negative overtime clamps to 0.
                    planned = phase_durations.get(phase, 0) * 60
                    actual = entry.get("duration_seconds", 0) or 0
                    phase_overtime[phase].append(max(0, actual - planned))

        # Aggregate per-phase statistics.
        result: Dict[str, Any] = {}
        for phase, overtimes in phase_overtime.items():
            if overtimes:
                overrun_count = len([o for o in overtimes if o > 0])
                result[phase] = {
                    "count": overrun_count,
                    "total": len(overtimes),
                    "avg_overtime_seconds": sum(overtimes) / len(overtimes),
                    "max_overtime_seconds": max(overtimes),
                    "overtime_percentage": overrun_count / len(overtimes) * 100,
                }
            else:
                result[phase] = {
                    "count": 0,
                    "total": 0,
                    "avg_overtime_seconds": 0,
                    "max_overtime_seconds": 0,
                    "overtime_percentage": 0,
                }

        return result