[split-required] Split 500-1000 LOC files across all services

backend-lehrer (5 files):
- alerts_agent/db/repository.py (992 → 5), abitur_docs_api.py (956 → 3)
- teacher_dashboard_api.py (951 → 3), services/pdf_service.py (916 → 3)
- mail/mail_db.py (987 → 6)

klausur-service (5 files):
- legal_templates_ingestion.py (942 → 3), ocr_pipeline_postprocess.py (929 → 4)
- ocr_pipeline_words.py (876 → 3), ocr_pipeline_ocr_merge.py (616 → 2)
- KorrekturPage.tsx (956 → 6)

website (5 pages):
- mail (985 → 9), edu-search (958 → 8), mac-mini (950 → 7)
- ocr-labeling (946 → 7), audit-workspace (871 → 4)

studio-v2 (5 files + 1 deleted):
- page.tsx (946 → 5), MessagesContext.tsx (925 → 4)
- korrektur (914 → 6), worksheet-cleanup (899 → 6)
- useVocabWorksheet.ts (888 → 3)
- Deleted dead page-original.tsx (934 LOC)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-04-24 23:35:37 +02:00
parent 6811264756
commit b6983ab1dc
99 changed files with 13484 additions and 16106 deletions

View File

@@ -0,0 +1,118 @@
"""
Mail Database - Statistics and Audit Log Operations.
"""
import json
import uuid
from typing import Optional, Dict
from datetime import datetime
from .mail_db_pool import get_pool
async def get_mail_stats(user_id: str) -> Dict:
    """Return aggregate mail statistics for *user_id*.

    Gathers account rows, email counters, and task counters from the mail
    database and flattens them into a single dict, including a per-account
    breakdown. Best-effort: returns an empty dict when the pool is
    unavailable or any query fails (errors are printed, never raised).
    """
    pool = await get_pool()
    if pool is None:
        return {}
    try:
        async with pool.acquire() as conn:
            # Local midnight — used as the cutoff for "today" counters.
            day_start = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            # All external accounts belonging to this user.
            account_rows = await conn.fetch(
                """
                SELECT id, email, display_name, status, email_count, unread_count, last_sync
                FROM external_email_accounts
                WHERE user_id = $1
                """,
                user_id
            )
            # Aggregate email counters (totals plus today's activity).
            email_row = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_emails,
                    COUNT(*) FILTER (WHERE is_read = FALSE) as unread_emails,
                    COUNT(*) FILTER (WHERE date_received >= $2) as emails_today,
                    COUNT(*) FILTER (WHERE ai_analyzed_at >= $2) as ai_analyses_today
                FROM aggregated_emails
                WHERE user_id = $1
                """,
                user_id, day_start
            )
            # Aggregate task counters (pending / overdue).
            task_row = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_tasks,
                    COUNT(*) FILTER (WHERE status = 'pending') as pending_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline < NOW()) as overdue_tasks
                FROM inbox_tasks
                WHERE user_id = $1
                """,
                user_id
            )
            # Derive account-status counts before assembling the payload.
            active_count = sum(1 for row in account_rows if row['status'] == 'active')
            error_count = sum(1 for row in account_rows if row['status'] == 'error')
            per_account = []
            for row in account_rows:
                last_sync = row['last_sync']
                per_account.append({
                    "id": row['id'],
                    "email": row['email'],
                    "display_name": row['display_name'],
                    "status": row['status'],
                    "email_count": row['email_count'],
                    "unread_count": row['unread_count'],
                    # last_sync is a timestamp column; serialize to ISO 8601.
                    "last_sync": last_sync.isoformat() if last_sync else None,
                })
            return {
                "total_accounts": len(account_rows),
                "active_accounts": active_count,
                "error_accounts": error_count,
                "total_emails": email_row['total_emails'] or 0,
                "unread_emails": email_row['unread_emails'] or 0,
                "total_tasks": task_row['total_tasks'] or 0,
                "pending_tasks": task_row['pending_tasks'] or 0,
                "overdue_tasks": task_row['overdue_tasks'] or 0,
                "emails_today": email_row['emails_today'] or 0,
                "ai_analyses_today": email_row['ai_analyses_today'] or 0,
                "per_account": per_account,
            }
    except Exception as e:
        print(f"Failed to get mail stats: {e}")
        return {}
async def log_mail_audit(
    user_id: str,
    action: str,
    entity_type: Optional[str] = None,
    entity_id: Optional[str] = None,
    details: Optional[Dict] = None,
    tenant_id: Optional[str] = None,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
) -> bool:
    """Insert one row into the mail audit trail.

    Best-effort: returns True when the row was written, False when the
    pool is unavailable or the insert (or detail serialization) fails —
    failures are printed, never raised.
    """
    pool = await get_pool()
    if pool is None:
        return False
    try:
        # Serialization stays inside the try so an unserializable
        # `details` dict is swallowed like any other audit failure.
        payload = json.dumps(details) if details else None
        record_id = str(uuid.uuid4())
        async with pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO mail_audit_log
                (id, user_id, tenant_id, action, entity_type, entity_id, details, ip_address, user_agent)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
                """,
                record_id, user_id, tenant_id, action, entity_type, entity_id,
                payload, ip_address, user_agent
            )
    except Exception as e:
        print(f"Failed to log mail audit: {e}")
        return False
    return True