[split-required] Split 700-870 LOC files across all services
backend-lehrer (11 files): - llm_gateway/routes/schools.py (867 → 5), recording_api.py (848 → 6) - messenger_api.py (840 → 5), print_generator.py (824 → 5) - unit_analytics_api.py (751 → 5), classroom/routes/context.py (726 → 4) - llm_gateway/routes/edu_search_seeds.py (710 → 4) klausur-service (12 files): - ocr_labeling_api.py (845 → 4), metrics_db.py (833 → 4) - legal_corpus_api.py (790 → 4), page_crop.py (758 → 3) - mail/ai_service.py (747 → 4), github_crawler.py (767 → 3) - trocr_service.py (730 → 4), full_compliance_pipeline.py (723 → 4) - dsfa_rag_api.py (715 → 4), ocr_pipeline_auto.py (705 → 4) website (6 pages): - audit-checklist (867 → 8), content (806 → 6) - screen-flow (790 → 4), scraper (789 → 5) - zeugnisse (776 → 5), modules (745 → 4) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
145
backend-lehrer/unit_analytics_export.py
Normal file
145
backend-lehrer/unit_analytics_export.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
Unit Analytics API - Export & Dashboard Routes.
|
||||
|
||||
Export endpoints for learning gains and misconceptions, plus dashboard overview.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
from fastapi.responses import Response
|
||||
|
||||
from unit_analytics_models import TimeRange, ExportFormat
|
||||
from unit_analytics_helpers import get_analytics_database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["Unit Analytics"])
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Export
|
||||
# ==============================================
|
||||
|
||||
@router.get("/export/learning-gains")
async def export_learning_gains(
    unit_id: Optional[str] = Query(None),
    class_id: Optional[str] = Query(None),
    time_range: TimeRange = Query(TimeRange.ALL),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export learning gain data.

    Returns a CSV attachment when ``format`` is CSV, otherwise a JSON
    payload with the applied filters and the raw rows. Database errors
    degrade to an empty data set (logged) rather than failing the request.
    """
    db = await get_analytics_database()
    data = []

    if db:
        try:
            data = await db.export_learning_gains(
                unit_id=unit_id, class_id=class_id, time_range=time_range.value
            )
        except Exception as e:
            # Best-effort export: log and fall through with empty data.
            logger.error(f"Failed to export data: {e}")

    if format == ExportFormat.CSV:
        # Build all rows then join once. The original duplicated the header
        # line in both arms of a redundant `if not data / else` and grew the
        # CSV via `+=` (quadratic); output bytes are identical.
        lines = ["student_id,unit_id,precheck,postcheck,gain\n"]
        for row in data:
            lines.append(
                f"{row['student_id']},{row['unit_id']},"
                f"{row.get('precheck', '')},{row.get('postcheck', '')},"
                f"{row.get('gain', '')}\n"
            )
        return Response(
            content="".join(lines),
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=learning_gains.csv"}
        )

    return {
        # NOTE(review): datetime.utcnow() is deprecated in Python 3.12+;
        # kept as-is because switching to now(timezone.utc) would change the
        # isoformat string consumers may parse — confirm before migrating.
        "export_date": datetime.utcnow().isoformat(),
        "filters": {
            "unit_id": unit_id, "class_id": class_id, "time_range": time_range.value,
        },
        "data": data,
    }
|
||||
|
||||
|
||||
@router.get("/export/misconceptions")
async def export_misconceptions(
    class_id: Optional[str] = Query(None),
    format: ExportFormat = Query(ExportFormat.JSON),
) -> Any:
    """
    Export misconception data for further analysis.

    Serves the most common misconceptions for the last month either as a
    CSV attachment or as a JSON payload, depending on ``format``.
    """
    # Import here to avoid circular dependency
    from unit_analytics_routes import get_misconception_report

    report = await get_misconception_report(
        class_id=class_id, unit_id=None,
        time_range=TimeRange.MONTH, limit=100
    )
    entries = report.most_common

    if format == ExportFormat.CSV:
        header = "concept_id,concept_label,misconception,frequency,unit_id,stop_id\n"
        body = [
            f'"{m.concept_id}","{m.concept_label}","{m.misconception_text}",{m.frequency},"{m.unit_id}","{m.stop_id}"\n'
            for m in entries
        ]
        return Response(
            content="".join([header, *body]),
            media_type="text/csv",
            headers={"Content-Disposition": "attachment; filename=misconceptions.csv"}
        )

    return {
        "export_date": datetime.utcnow().isoformat(),
        "class_id": class_id,
        "total_entries": len(entries),
        "data": [m.model_dump() for m in entries],
    }
|
||||
|
||||
|
||||
# ==============================================
|
||||
# API Endpoints - Dashboard Aggregates
|
||||
# ==============================================
|
||||
|
||||
@router.get("/dashboard/overview")
async def get_analytics_overview(
    time_range: TimeRange = Query(TimeRange.MONTH),
) -> Dict[str, Any]:
    """
    Get high-level analytics overview for dashboard.

    Falls back to an all-zero overview when the database is unavailable
    or the query raises; errors are logged, never surfaced to the caller.
    """
    db = await get_analytics_database()

    if db:
        try:
            return await db.get_analytics_overview(time_range.value)
        except Exception as e:
            logger.error(f"Failed to get analytics overview: {e}")

    # Empty-but-valid shape so the dashboard renders without data.
    empty_overview: Dict[str, Any] = {
        "time_range": time_range.value,
        "total_sessions": 0,
        "unique_students": 0,
        "avg_completion_rate": 0.0,
        "avg_learning_gain": 0.0,
        "most_played_units": [],
        "struggling_concepts": [],
        "active_classes": 0,
    }
    return empty_overview
|
||||
|
||||
|
||||
@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Health check for analytics API."""
    db = await get_analytics_database()
    db_state = "connected" if db else "disconnected"
    return {
        "status": "healthy",
        "service": "unit-analytics",
        "database": db_state,
    }
|
||||
Reference in New Issue
Block a user