A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
581 lines
20 KiB
Python
"""
|
|
Test Registry - Backlog Endpoints
|
|
|
|
Endpoints for failed test backlog management.
|
|
"""
|
|
|
|
from datetime import datetime
|
|
from typing import Optional
|
|
|
|
from fastapi import APIRouter, HTTPException, Query
|
|
|
|
from ...database import get_db_session
|
|
from ...repository import TestRepository
|
|
from ..api_models import (
|
|
BacklogStatusUpdate,
|
|
BacklogPriorityUpdate,
|
|
FixAttempt,
|
|
ManualBacklogEntry,
|
|
)
|
|
from ..config import (
|
|
get_test_runs,
|
|
get_persisted_results,
|
|
is_postgres_available,
|
|
migrate_json_to_postgres,
|
|
)
|
|
|
|
router = APIRouter()
|
|
|
|
|
|
@router.get("/failed")
|
|
async def get_failed_tests():
|
|
"""
|
|
Gibt alle fehlgeschlagenen Tests aus den persistierten Ergebnissen zurueck.
|
|
Fuer Backlog-Verwaltung mit menschenverstaendlichen Fehlerbeschreibungen.
|
|
"""
|
|
persisted_results = get_persisted_results()
|
|
failed_tests = []
|
|
|
|
# Sammle fehlgeschlagene Tests aus persistierten Ergebnissen
|
|
for service, data in persisted_results.items():
|
|
run_time = data.get("last_run", "")
|
|
run_id = f"persisted_{service}"
|
|
|
|
# Hole fehlgeschlagene Test-IDs
|
|
for failed in data.get("failed_test_ids", []):
|
|
if isinstance(failed, dict):
|
|
failed_tests.append({
|
|
"id": failed.get("id", ""),
|
|
"name": failed.get("name", ""),
|
|
"service": service,
|
|
"file_path": failed.get("file_path", ""),
|
|
"line_number": failed.get("line_number"),
|
|
"error_message": failed.get("error_message", "Keine Fehlermeldung verfuegbar"),
|
|
"error_type": failed.get("error_type", "unknown"),
|
|
"suggestion": failed.get("suggestion", ""),
|
|
"run_id": run_id,
|
|
"last_failed": run_time,
|
|
"status": "open", # open, in_progress, fixed
|
|
})
|
|
elif isinstance(failed, str):
|
|
# Legacy-Format: nur Test-ID als String
|
|
failed_tests.append({
|
|
"id": failed,
|
|
"name": failed,
|
|
"service": service,
|
|
"file_path": "",
|
|
"line_number": None,
|
|
"error_message": "Keine Details verfuegbar",
|
|
"error_type": "unknown",
|
|
"suggestion": "",
|
|
"run_id": run_id,
|
|
"last_failed": run_time,
|
|
"status": "open",
|
|
})
|
|
|
|
# Dedupliziere nach Test-ID (nur neueste Version behalten)
|
|
seen = {}
|
|
for test in failed_tests:
|
|
test_id = test["id"]
|
|
if test_id not in seen or test["last_failed"] > seen[test_id]["last_failed"]:
|
|
seen[test_id] = test
|
|
|
|
unique_failed = list(seen.values())
|
|
|
|
# Gruppiere nach Service
|
|
by_service = {}
|
|
for test in unique_failed:
|
|
service = test["service"]
|
|
if service not in by_service:
|
|
by_service[service] = []
|
|
by_service[service].append(test)
|
|
|
|
return {
|
|
"total_failed": len(unique_failed),
|
|
"by_service": by_service,
|
|
"tests": unique_failed,
|
|
"last_updated": datetime.now().isoformat(),
|
|
}
|
|
|
|
|
|
@router.post("/failed/{test_id}/status")
|
|
async def update_failed_test_status(test_id: str, status: str):
|
|
"""
|
|
Aktualisiert den Status eines fehlgeschlagenen Tests.
|
|
Status: 'open', 'in_progress', 'fixed', 'wont_fix'
|
|
|
|
Legacy-Endpoint - nutzt nun PostgreSQL wenn verfuegbar.
|
|
"""
|
|
valid_statuses = ["open", "in_progress", "fixed", "wont_fix", "flaky"]
|
|
if status not in valid_statuses:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Ungueltiger Status. Erlaubt: {', '.join(valid_statuses)}"
|
|
)
|
|
|
|
# Versuche in PostgreSQL zu speichern
|
|
if is_postgres_available():
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
# Suche nach Backlog-Item mit test_id
|
|
backlog_items = repo.get_backlog()
|
|
for item in backlog_items:
|
|
if item.test_name == test_id or str(item.id) == test_id:
|
|
repo.update_backlog_status(item.id, status)
|
|
return {
|
|
"test_id": test_id,
|
|
"backlog_id": item.id,
|
|
"status": status,
|
|
"updated_at": datetime.now().isoformat(),
|
|
"message": f"Test-Status auf '{status}' gesetzt (PostgreSQL)",
|
|
}
|
|
except Exception as e:
|
|
print(f"PostgreSQL-Fehler: {e}")
|
|
|
|
# Fallback: nur Bestaetigung zurueckgeben
|
|
return {
|
|
"test_id": test_id,
|
|
"status": status,
|
|
"updated_at": datetime.now().isoformat(),
|
|
"message": f"Test-Status auf '{status}' gesetzt",
|
|
}
|
|
|
|
|
|
@router.get("/backlog")
|
|
async def get_backlog(
|
|
status: Optional[str] = Query(None, description="Filter nach Status: open, in_progress, fixed, wont_fix, flaky"),
|
|
service: Optional[str] = Query(None, description="Filter nach Service"),
|
|
priority: Optional[str] = Query(None, description="Filter nach Prioritaet: critical, high, medium, low"),
|
|
limit: int = Query(100, ge=1, le=500),
|
|
offset: int = Query(0, ge=0)
|
|
):
|
|
"""
|
|
Gibt den persistenten Backlog fehlgeschlagener Tests zurueck.
|
|
|
|
Der Backlog aggregiert fehlgeschlagene Tests ueber mehrere Runs hinweg
|
|
und ermoeglicht Status-Management (open -> in_progress -> fixed).
|
|
"""
|
|
if not is_postgres_available():
|
|
# Fallback auf legacy /failed Endpoint
|
|
return await get_failed_tests()
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
items = repo.get_backlog(
|
|
status=status,
|
|
service=service,
|
|
priority=priority,
|
|
limit=limit,
|
|
offset=offset
|
|
)
|
|
total = repo.get_backlog_count(status=status, service=service)
|
|
|
|
# Gruppiere nach Service
|
|
by_service = {}
|
|
for item in items:
|
|
svc = item.service
|
|
if svc not in by_service:
|
|
by_service[svc] = []
|
|
by_service[svc].append(item.to_dict())
|
|
|
|
return {
|
|
"total": total,
|
|
"items": [item.to_dict() for item in items],
|
|
"by_service": by_service,
|
|
"filters": {
|
|
"status": status,
|
|
"service": service,
|
|
"priority": priority
|
|
},
|
|
"pagination": {
|
|
"limit": limit,
|
|
"offset": offset
|
|
}
|
|
}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.get("/backlog/{backlog_id}")
|
|
async def get_backlog_item(backlog_id: int):
|
|
"""
|
|
Gibt Details zu einem einzelnen Backlog-Eintrag zurueck.
|
|
Inklusive Fix-Historie.
|
|
"""
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
item = repo.get_backlog_item(backlog_id)
|
|
if not item:
|
|
raise HTTPException(status_code=404, detail=f"Backlog-Item {backlog_id} nicht gefunden")
|
|
|
|
# Hole Fix-Historie
|
|
fixes = repo.get_fix_history(backlog_id)
|
|
|
|
result = item.to_dict()
|
|
result["fixes"] = [fix.to_dict() for fix in fixes]
|
|
return result
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.post("/backlog/{backlog_id}/status")
|
|
async def update_backlog_item_status(backlog_id: int, update: BacklogStatusUpdate):
|
|
"""
|
|
Aktualisiert den Status eines Backlog-Eintrags.
|
|
|
|
Moegliche Status:
|
|
- open: Noch nicht bearbeitet
|
|
- in_progress: Wird gerade bearbeitet
|
|
- fixed: Test wurde gefixt
|
|
- wont_fix: Wird nicht gefixt (mit Begruendung)
|
|
- flaky: Flaky Test, wird separat behandelt
|
|
"""
|
|
valid_statuses = ["open", "in_progress", "fixed", "wont_fix", "flaky"]
|
|
if update.status not in valid_statuses:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Ungueltiger Status. Erlaubt: {', '.join(valid_statuses)}"
|
|
)
|
|
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
item = repo.update_backlog_status(
|
|
backlog_id=backlog_id,
|
|
status=update.status,
|
|
notes=update.notes,
|
|
assigned_to=update.assigned_to
|
|
)
|
|
if not item:
|
|
raise HTTPException(status_code=404, detail=f"Backlog-Item {backlog_id} nicht gefunden")
|
|
|
|
return item.to_dict()
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.post("/backlog/{backlog_id}/priority")
|
|
async def update_backlog_item_priority(backlog_id: int, update: BacklogPriorityUpdate):
|
|
"""
|
|
Aktualisiert die Prioritaet eines Backlog-Eintrags.
|
|
|
|
Moegliche Prioritaeten:
|
|
- critical: Kritisch - sofort beheben
|
|
- high: Hoch - bald beheben
|
|
- medium: Mittel - bei Gelegenheit
|
|
- low: Niedrig - irgendwann
|
|
"""
|
|
valid_priorities = ["critical", "high", "medium", "low"]
|
|
if update.priority not in valid_priorities:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Ungueltige Prioritaet. Erlaubt: {', '.join(valid_priorities)}"
|
|
)
|
|
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
item = repo.update_backlog_priority(backlog_id, update.priority)
|
|
if not item:
|
|
raise HTTPException(status_code=404, detail=f"Backlog-Item {backlog_id} nicht gefunden")
|
|
|
|
return item.to_dict()
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.post("/backlog/{backlog_id}/fix")
|
|
async def add_fix_attempt(backlog_id: int, fix: FixAttempt):
|
|
"""
|
|
Fuegt einen Fix-Versuch zur Historie hinzu.
|
|
|
|
Bei success=True wird der Backlog-Status automatisch auf 'fixed' gesetzt.
|
|
"""
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
valid_fix_types = ["manual", "auto_claude", "auto_script"]
|
|
if fix.fix_type not in valid_fix_types:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Ungueltiger Fix-Typ. Erlaubt: {', '.join(valid_fix_types)}"
|
|
)
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
|
|
# Pruefe ob Backlog-Item existiert
|
|
item = repo.get_backlog_item(backlog_id)
|
|
if not item:
|
|
raise HTTPException(status_code=404, detail=f"Backlog-Item {backlog_id} nicht gefunden")
|
|
|
|
# Fix-Versuch hinzufuegen
|
|
fix_record = repo.add_fix_attempt(
|
|
backlog_id=backlog_id,
|
|
fix_type=fix.fix_type,
|
|
fix_description=fix.fix_description,
|
|
commit_hash=fix.commit_hash,
|
|
success=fix.success
|
|
)
|
|
|
|
return {
|
|
"fix": fix_record.to_dict(),
|
|
"backlog_status": "fixed" if fix.success else item.status
|
|
}
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.post("/backlog")
|
|
async def create_backlog_entry(entry: ManualBacklogEntry):
|
|
"""
|
|
Erstellt einen manuellen Backlog-Eintrag.
|
|
|
|
Nuetzlich fuer:
|
|
- Nicht-integrierte Features (xfail Tests)
|
|
- Bekannte Probleme die noch behoben werden muessen
|
|
- Feature Requests aus dem Test-Kontext
|
|
"""
|
|
from ...db_models import FailedTestBacklogDB
|
|
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
valid_priorities = ["critical", "high", "medium", "low"]
|
|
if entry.priority not in valid_priorities:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Ungueltige Prioritaet. Erlaubt: {', '.join(valid_priorities)}"
|
|
)
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
now = datetime.utcnow()
|
|
|
|
# Pruefe ob schon ein offener Eintrag existiert
|
|
existing = db.query(FailedTestBacklogDB).filter(
|
|
FailedTestBacklogDB.test_name == entry.test_name,
|
|
FailedTestBacklogDB.service == entry.service,
|
|
FailedTestBacklogDB.status == "open"
|
|
).first()
|
|
|
|
if existing:
|
|
# Aktualisiere existierenden Eintrag
|
|
existing.error_message = entry.error_message
|
|
existing.priority = entry.priority
|
|
existing.fix_suggestion = entry.fix_suggestion
|
|
existing.last_failed_at = now
|
|
db.commit()
|
|
return {
|
|
"id": existing.id,
|
|
"status": "updated",
|
|
"message": f"Existierender Backlog-Eintrag aktualisiert"
|
|
}
|
|
|
|
# Neuen Eintrag erstellen
|
|
backlog = FailedTestBacklogDB(
|
|
test_name=entry.test_name,
|
|
test_file=f"{entry.service}/",
|
|
service=entry.service,
|
|
framework="manual",
|
|
error_message=entry.error_message,
|
|
error_type="feature_not_integrated",
|
|
status="open",
|
|
priority=entry.priority,
|
|
fix_suggestion=entry.fix_suggestion,
|
|
first_failed_at=now,
|
|
last_failed_at=now,
|
|
failure_count=1
|
|
)
|
|
db.add(backlog)
|
|
db.commit()
|
|
db.refresh(backlog)
|
|
|
|
return {
|
|
"id": backlog.id,
|
|
"status": "created",
|
|
"message": f"Backlog-Eintrag erstellt: {entry.test_name}"
|
|
}
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.get("/history")
|
|
async def get_test_history(
|
|
service: Optional[str] = Query(None, description="Filter nach Service"),
|
|
days: int = Query(30, ge=1, le=365, description="Anzahl Tage zurueck"),
|
|
limit: int = Query(100, ge=1, le=1000)
|
|
):
|
|
"""
|
|
Gibt die Test-Run Historie fuer Trend-Analysen zurueck.
|
|
|
|
Aggregiert Daten nach Tag und Service.
|
|
"""
|
|
test_runs = get_test_runs()
|
|
|
|
if not is_postgres_available():
|
|
# Fallback auf In-Memory Historie
|
|
sorted_runs = sorted(test_runs, key=lambda r: r["started_at"], reverse=True)
|
|
return {"runs": sorted_runs[:limit], "source": "memory"}
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
history = repo.get_run_history(
|
|
service=service,
|
|
days=days,
|
|
limit=limit
|
|
)
|
|
|
|
return {
|
|
"history": history,
|
|
"days": days,
|
|
"service": service,
|
|
"source": "postgresql"
|
|
}
|
|
except Exception as e:
|
|
# Fallback auf In-Memory
|
|
sorted_runs = sorted(test_runs, key=lambda r: r["started_at"], reverse=True)
|
|
return {"runs": sorted_runs[:limit], "source": "memory", "error": str(e)}
|
|
|
|
|
|
@router.get("/trends")
|
|
async def get_test_trends(
|
|
service: Optional[str] = Query(None, description="Filter nach Service"),
|
|
days: int = Query(14, ge=1, le=90, description="Anzahl Tage")
|
|
):
|
|
"""
|
|
Gibt Trend-Daten fuer Visualisierungen zurueck.
|
|
|
|
Zeigt Pass-Rate und Test-Anzahl ueber Zeit.
|
|
"""
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar fuer Trends")
|
|
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
history = repo.get_run_history(service=service, days=days, limit=days * 20)
|
|
|
|
# Aggregiere nach Tag
|
|
by_date = {}
|
|
for entry in history:
|
|
date = entry["date"]
|
|
if date not in by_date:
|
|
by_date[date] = {
|
|
"date": date,
|
|
"total_tests": 0,
|
|
"passed": 0,
|
|
"failed": 0,
|
|
"runs": 0
|
|
}
|
|
by_date[date]["total_tests"] += entry["total_tests"]
|
|
by_date[date]["passed"] += entry["passed"]
|
|
by_date[date]["failed"] += entry["failed"]
|
|
by_date[date]["runs"] += entry["runs"]
|
|
|
|
# Berechne Pass-Rate pro Tag
|
|
trends = []
|
|
for date, data in sorted(by_date.items()):
|
|
total = data["total_tests"]
|
|
data["pass_rate"] = round((data["passed"] / total * 100) if total > 0 else 0, 1)
|
|
trends.append(data)
|
|
|
|
return {
|
|
"trends": trends,
|
|
"days": days,
|
|
"service": service
|
|
}
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Datenbankfehler: {str(e)}")
|
|
|
|
|
|
@router.get("/stats")
|
|
async def get_aggregated_stats():
|
|
"""
|
|
Gibt aggregierte Statistiken ueber alle Services zurueck.
|
|
|
|
Kombiniert Daten aus PostgreSQL und Service-Definitionen.
|
|
"""
|
|
from ...models import TestRegistryStats
|
|
|
|
persisted_results = get_persisted_results()
|
|
|
|
if is_postgres_available():
|
|
try:
|
|
with get_db_session() as db:
|
|
repo = TestRepository(db)
|
|
summary = repo.get_summary_stats()
|
|
service_stats = repo.get_all_service_stats()
|
|
|
|
return {
|
|
"summary": summary,
|
|
"services": [s.to_dict() for s in service_stats],
|
|
"source": "postgresql"
|
|
}
|
|
except Exception as e:
|
|
print(f"PostgreSQL-Fehler: {e}")
|
|
|
|
# Fallback auf Legacy-Daten
|
|
stats = TestRegistryStats()
|
|
for service, data in persisted_results.items():
|
|
stats.total_tests += data.get("total", 0)
|
|
stats.total_passed += data.get("passed", 0)
|
|
stats.total_failed += data.get("failed", 0)
|
|
|
|
stats.services_count = len(persisted_results)
|
|
stats.overall_pass_rate = (stats.total_passed / stats.total_tests * 100) if stats.total_tests > 0 else 0
|
|
|
|
return {
|
|
"summary": {
|
|
"total_tests": stats.total_tests,
|
|
"total_passed": stats.total_passed,
|
|
"total_failed": stats.total_failed,
|
|
"total_skipped": stats.total_skipped,
|
|
"services_count": stats.services_count,
|
|
"overall_pass_rate": round(stats.overall_pass_rate, 1)
|
|
},
|
|
"services": list(persisted_results.keys()),
|
|
"source": "memory"
|
|
}
|
|
|
|
|
|
@router.post("/migrate")
|
|
async def trigger_migration():
|
|
"""
|
|
Migriert bestehende JSON-Daten nach PostgreSQL.
|
|
|
|
Einmalig ausfuehren um historische Daten zu uebernehmen.
|
|
"""
|
|
if not is_postgres_available():
|
|
raise HTTPException(status_code=503, detail="PostgreSQL nicht verfuegbar")
|
|
|
|
count = migrate_json_to_postgres()
|
|
return {
|
|
"migrated_services": count,
|
|
"message": f"{count} Services von JSON nach PostgreSQL migriert"
|
|
}
|