[split-required] Split 500-1000 LOC files across all services
backend-lehrer (5 files): - alerts_agent/db/repository.py (992 → 5), abitur_docs_api.py (956 → 3) - teacher_dashboard_api.py (951 → 3), services/pdf_service.py (916 → 3) - mail/mail_db.py (987 → 6) klausur-service (5 files): - legal_templates_ingestion.py (942 → 3), ocr_pipeline_postprocess.py (929 → 4) - ocr_pipeline_words.py (876 → 3), ocr_pipeline_ocr_merge.py (616 → 2) - KorrekturPage.tsx (956 → 6) website (5 pages): - mail (985 → 9), edu-search (958 → 8), mac-mini (950 → 7) - ocr-labeling (946 → 7), audit-workspace (871 → 4) studio-v2 (5 files + 1 deleted): - page.tsx (946 → 5), MessagesContext.tsx (925 → 4) - korrektur (914 → 6), worksheet-cleanup (899 → 6) - useVocabWorksheet.ts (888 → 3) - Deleted dead page-original.tsx (934 LOC) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
225
klausur-service/backend/mail/mail_db_emails.py
Normal file
225
klausur-service/backend/mail/mail_db_emails.py
Normal file
@@ -0,0 +1,225 @@
|
||||
"""
|
||||
Mail Database - Aggregated Email Operations.
|
||||
"""
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from typing import Optional, List, Dict
|
||||
from datetime import datetime
|
||||
|
||||
from .mail_db_pool import get_pool
|
||||
|
||||
|
||||
async def upsert_email(
    account_id: str,
    user_id: str,
    tenant_id: str,
    message_id: str,
    subject: str,
    sender_email: str,
    sender_name: Optional[str],
    recipients: List[str],
    cc: List[str],
    body_preview: Optional[str],
    body_text: Optional[str],
    body_html: Optional[str],
    has_attachments: bool,
    attachments: List[Dict],
    headers: Dict,
    folder: str,
    date_sent: datetime,
    date_received: datetime,
) -> Optional[str]:
    """Insert or update an email in ``aggregated_emails``.

    Idempotent on ``(account_id, message_id)``: re-syncing an already-stored
    message refreshes its subject and folder rather than inserting a
    duplicate row.

    Returns:
        The email row ID (existing or newly created), or ``None`` when the
        pool is unavailable or the statement fails (best-effort contract;
        errors are logged, not raised).
    """
    pool = await get_pool()
    if pool is None:
        return None

    email_id = str(uuid.uuid4())
    try:
        async with pool.acquire() as conn:
            row = await conn.fetchrow(
                """
                INSERT INTO aggregated_emails
                    (id, account_id, user_id, tenant_id, message_id, subject,
                     sender_email, sender_name, recipients, cc, body_preview,
                     body_text, body_html, has_attachments, attachments, headers,
                     folder, date_sent, date_received)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
                ON CONFLICT (account_id, message_id) DO UPDATE SET
                    subject = EXCLUDED.subject,
                    folder = EXCLUDED.folder
                RETURNING id
                """,
                # BUG FIX: the previous ON CONFLICT clause also set
                # ``is_read = EXCLUDED.is_read``. Since ``is_read`` is not in
                # the INSERT column list, EXCLUDED.is_read is the column
                # DEFAULT, so every re-sync silently reset the user's read
                # state. The assignment was removed to preserve it.
                email_id, account_id, user_id, tenant_id, message_id, subject,
                sender_email, sender_name, json.dumps(recipients), json.dumps(cc),
                body_preview, body_text, body_html, has_attachments,
                json.dumps(attachments), json.dumps(headers), folder,
                date_sent, date_received
            )
            return row['id'] if row else None
    except Exception as e:
        # Best-effort: sync pipelines tolerate individual upsert failures.
        print(f"Failed to upsert email: {e}")
        return None
|
||||
|
||||
|
||||
async def get_unified_inbox(
    user_id: str,
    account_ids: Optional[List[str]] = None,
    categories: Optional[List[str]] = None,
    is_read: Optional[bool] = None,
    is_starred: Optional[bool] = None,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict]:
    """Return the user's unified inbox, newest first, with optional filters.

    Each row is the aggregated email joined with its source account's
    address and display name. Returns ``[]`` when the pool is unavailable
    or the query fails.
    """
    pool = await get_pool()
    if pool is None:
        return []

    try:
        async with pool.acquire() as conn:
            clauses = ["user_id = $1", "is_deleted = FALSE"]
            args: List = [user_id]

            def add_filter(template: str, value) -> None:
                # Bind the value to the next free positional placeholder.
                clauses.append(template.format(len(args) + 1))
                args.append(value)

            if account_ids:
                add_filter("account_id = ANY(${})", account_ids)
            if categories:
                add_filter("category = ANY(${})", categories)
            if is_read is not None:
                add_filter("is_read = ${}", is_read)
            if is_starred is not None:
                add_filter("is_starred = ${}", is_starred)

            # LIMIT/OFFSET take the two slots after the last filter.
            slot = len(args) + 1
            args.extend([limit, offset])

            query = f"""
                SELECT e.*, a.email as account_email, a.display_name as account_name
                FROM aggregated_emails e
                JOIN external_email_accounts a ON e.account_id = a.id
                WHERE {" AND ".join(clauses)}
                ORDER BY e.date_received DESC
                LIMIT ${slot} OFFSET ${slot + 1}
            """

            records = await conn.fetch(query, *args)
            return [dict(record) for record in records]
    except Exception as e:
        print(f"Failed to get unified inbox: {e}")
        return []
|
||||
|
||||
|
||||
async def get_email(email_id: str, user_id: str) -> Optional[Dict]:
    """Fetch one aggregated email joined with its account metadata.

    The lookup is scoped to ``user_id`` so a user can never read another
    user's message by guessing an ID. Returns ``None`` when the pool is
    unavailable, no row matches, or the query fails.
    """
    pool = await get_pool()
    if pool is None:
        return None

    query = """
                SELECT e.*, a.email as account_email, a.display_name as account_name
                FROM aggregated_emails e
                JOIN external_email_accounts a ON e.account_id = a.id
                WHERE e.id = $1 AND e.user_id = $2
                """
    try:
        async with pool.acquire() as connection:
            record = await connection.fetchrow(query, email_id, user_id)
            if record is None:
                return None
            return dict(record)
    except Exception as e:
        print(f"Failed to get email: {e}")
        return None
|
||||
|
||||
|
||||
async def update_email_ai_analysis(
    email_id: str,
    category: str,
    sender_type: str,
    sender_authority_name: Optional[str],
    detected_deadlines: List[Dict],
    suggested_priority: str,
    ai_summary: Optional[str],
) -> bool:
    """Persist AI analysis results onto an aggregated email row.

    Stamps ``ai_analyzed_at`` with the database's NOW() so re-analysis
    can be detected. ``detected_deadlines`` is stored as JSON.

    Returns:
        True on success, False when the pool is down or the UPDATE fails.
    """
    pool = await get_pool()
    if pool is None:
        return False

    statement = """
                UPDATE aggregated_emails SET
                    category = $2,
                    sender_type = $3,
                    sender_authority_name = $4,
                    detected_deadlines = $5,
                    suggested_priority = $6,
                    ai_summary = $7,
                    ai_analyzed_at = NOW()
                WHERE id = $1
                """
    values = (
        email_id,
        category,
        sender_type,
        sender_authority_name,
        json.dumps(detected_deadlines),
        suggested_priority,
        ai_summary,
    )
    try:
        async with pool.acquire() as connection:
            await connection.execute(statement, *values)
        return True
    except Exception as e:
        print(f"Failed to update email AI analysis: {e}")
        return False
|
||||
|
||||
|
||||
async def mark_email_read(email_id: str, user_id: str, is_read: bool = True) -> bool:
    """Mark an email as read or unread.

    Args:
        email_id: ID of the aggregated email row.
        user_id: Owner of the email; scoping the UPDATE prevents one user
            from toggling another user's messages.
        is_read: Target read state (defaults to marking as read).

    Returns:
        True if a matching row was updated; False when the pool is down,
        the statement fails, or no row matched the (id, user) pair.
    """
    pool = await get_pool()
    if pool is None:
        return False

    try:
        async with pool.acquire() as conn:
            status = await conn.execute(
                """
                UPDATE aggregated_emails SET is_read = $3
                WHERE id = $1 AND user_id = $2
                """,
                email_id, user_id, is_read
            )
            # BUG FIX: previously returned True unconditionally, hiding the
            # "no such email for this user" case. asyncpg's execute() returns
            # a command tag like "UPDATE 1"; "UPDATE 0" means nothing matched.
            return not status.endswith(" 0")
    except Exception as e:
        print(f"Failed to mark email read: {e}")
        return False
|
||||
|
||||
|
||||
async def mark_email_starred(email_id: str, user_id: str, is_starred: bool = True) -> bool:
    """Mark an email as starred or unstarred.

    Args:
        email_id: ID of the aggregated email row.
        user_id: Owner of the email; scoping the UPDATE prevents one user
            from toggling another user's messages.
        is_starred: Target starred state (defaults to starring).

    Returns:
        True if a matching row was updated; False when the pool is down,
        the statement fails, or no row matched the (id, user) pair.
    """
    pool = await get_pool()
    if pool is None:
        return False

    try:
        async with pool.acquire() as conn:
            status = await conn.execute(
                """
                UPDATE aggregated_emails SET is_starred = $3
                WHERE id = $1 AND user_id = $2
                """,
                email_id, user_id, is_starred
            )
            # BUG FIX: previously returned True unconditionally, hiding the
            # "no such email for this user" case. asyncpg's execute() returns
            # a command tag like "UPDATE 1"; "UPDATE 0" means nothing matched.
            return not status.endswith(" 0")
    except Exception as e:
        print(f"Failed to mark email starred: {e}")
        return False
|
||||
Reference in New Issue
Block a user