fix: Restore all files lost during destructive rebase

A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-02-09 09:51:32 +01:00
parent f7487ee240
commit bfdaf63ba9
2009 changed files with 749983 additions and 1731 deletions

View File

@@ -0,0 +1,106 @@
"""
Unified Inbox Mail Module
Multi-Account IMAP aggregation with KI-powered analysis.
Features:
- Multi-account IMAP aggregation
- Secure credential storage (Vault/encrypted)
- KI-powered email analysis (sender classification, deadline extraction)
- Arbeitsvorrat (task management) with deadline tracking
- Response suggestions
Usage:
from mail.api import router as mail_router
app.include_router(mail_router)
API Endpoints:
POST /api/v1/mail/init - Initialize database tables
POST /api/v1/mail/accounts - Create email account
GET /api/v1/mail/accounts - List accounts
GET /api/v1/mail/inbox - Get unified inbox
POST /api/v1/mail/analyze/{id} - Analyze email with AI
GET /api/v1/mail/tasks - Get tasks (Arbeitsvorrat)
GET /api/v1/mail/tasks/dashboard - Dashboard statistics
"""
from .models import (
# Enums
AccountStatus,
TaskStatus,
TaskPriority,
EmailCategory,
SenderType,
# Account models
EmailAccountCreate,
EmailAccountUpdate,
EmailAccount,
AccountTestResult,
# Email models
AggregatedEmail,
EmailSearchParams,
EmailComposeRequest,
EmailSendResult,
# Task models
TaskCreate,
TaskUpdate,
InboxTask,
TaskDashboardStats,
# AI models
SenderClassification,
DeadlineExtraction,
EmailAnalysisResult,
ResponseSuggestion,
# Stats
MailStats,
MailHealthCheck,
# Templates
EmailTemplate,
EmailTemplateCreate,
)
from .api import router
from .aggregator import get_mail_aggregator
from .ai_service import get_ai_email_service
from .task_service import get_task_service
from .credentials import get_credentials_service
from .mail_db import init_mail_tables
# Public API of the mail package.  Keep this list in sync with the
# imports above: anything added to the package surface must appear both
# in an import and here, or `from mail import *` will not expose it.
__all__ = [
    # Router
    "router",
    # Services
    "get_mail_aggregator",
    "get_ai_email_service",
    "get_task_service",
    "get_credentials_service",
    # Database
    "init_mail_tables",
    # Enums
    "AccountStatus",
    "TaskStatus",
    "TaskPriority",
    "EmailCategory",
    "SenderType",
    # Models
    "EmailAccountCreate",
    "EmailAccountUpdate",
    "EmailAccount",
    "AccountTestResult",
    "AggregatedEmail",
    "EmailSearchParams",
    "EmailComposeRequest",
    "EmailSendResult",
    "TaskCreate",
    "TaskUpdate",
    "InboxTask",
    "TaskDashboardStats",
    "SenderClassification",
    "DeadlineExtraction",
    "EmailAnalysisResult",
    "ResponseSuggestion",
    "MailStats",
    "MailHealthCheck",
    "EmailTemplate",
    "EmailTemplateCreate",
]

View File

@@ -0,0 +1,541 @@
"""
Mail Aggregator Service
Multi-account IMAP aggregation with async support.
"""
import os
import ssl
import email
import asyncio
import logging
import smtplib
from typing import Optional, List, Dict, Any, Tuple
from datetime import datetime, timezone
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.header import decode_header, make_header
from email.utils import parsedate_to_datetime, parseaddr
from .credentials import get_credentials_service, MailCredentials
from .mail_db import (
get_email_accounts,
get_email_account,
update_account_status,
upsert_email,
get_unified_inbox,
)
from .models import (
AccountStatus,
AccountTestResult,
AggregatedEmail,
EmailComposeRequest,
EmailSendResult,
)
logger = logging.getLogger(__name__)
class IMAPConnectionError(Exception):
    """Signals that an IMAP login, sync, or fetch could not complete."""
class SMTPConnectionError(Exception):
    """Signals that an SMTP connection or send attempt could not complete."""
class MailAggregator:
    """
    Aggregates emails from multiple IMAP accounts into a unified inbox.

    Features:
    - Connect to multiple IMAP accounts
    - Fetch and cache emails in PostgreSQL (via mail_db helpers)
    - Send emails via SMTP
    - Handle connection pooling

    All IMAP/SMTP work uses the blocking stdlib clients (``imaplib`` /
    ``smtplib``) directly inside async methods.
    # NOTE(review): these blocking calls run on the event loop; confirm
    # whether they should be pushed to a thread executor under load.
    """

    def __init__(self):
        # Resolves per-account passwords from Vault / encrypted storage.
        self._credentials_service = get_credentials_service()
        # Cache slot for live IMAP connections keyed by account id.
        # NOTE(review): never written to in this class — confirm whether
        # pooling is still planned or this dict can be removed.
        self._imap_connections: Dict[str, Any] = {}
        # Serializes sync_all_accounts() so two callers cannot sync the
        # same user's accounts concurrently.
        self._sync_lock = asyncio.Lock()

    async def test_account_connection(
        self,
        imap_host: str,
        imap_port: int,
        imap_ssl: bool,
        smtp_host: str,
        smtp_port: int,
        smtp_ssl: bool,
        email_address: str,
        password: str,
    ) -> AccountTestResult:
        """
        Test IMAP and SMTP connection with provided credentials.

        The two protocols are probed independently: a failure on one side
        is recorded in ``error_message`` (errors from both sides are joined
        with "; ") and does not prevent probing the other side.
        ``success`` is True only when both probes succeed.

        Returns:
            AccountTestResult with connection status
        """
        result = AccountTestResult(
            success=False,
            imap_connected=False,
            smtp_connected=False,
        )
        # Test IMAP
        try:
            import imaplib
            if imap_ssl:
                imap = imaplib.IMAP4_SSL(imap_host, imap_port)
            else:
                imap = imaplib.IMAP4(imap_host, imap_port)
            imap.login(email_address, password)
            result.imap_connected = True
            # List folders so the caller can offer them for sync selection.
            status, folders = imap.list()
            if status == "OK":
                result.folders_found = [
                    self._parse_folder_name(f) for f in folders if f
                ]
            imap.logout()
        except Exception as e:
            result.error_message = f"IMAP Error: {str(e)}"
            logger.warning(f"IMAP test failed for {email_address}: {e}")
        # Test SMTP
        try:
            if smtp_ssl:
                smtp = smtplib.SMTP_SSL(smtp_host, smtp_port)
            else:
                smtp = smtplib.SMTP(smtp_host, smtp_port)
                # Plaintext connections are upgraded via STARTTLS before login.
                smtp.starttls()
            smtp.login(email_address, password)
            result.smtp_connected = True
            smtp.quit()
        except Exception as e:
            smtp_error = f"SMTP Error: {str(e)}"
            # Append to any IMAP error already recorded above.
            if result.error_message:
                result.error_message += f"; {smtp_error}"
            else:
                result.error_message = smtp_error
            logger.warning(f"SMTP test failed for {email_address}: {e}")
        result.success = result.imap_connected and result.smtp_connected
        return result

    def _parse_folder_name(self, folder_response: bytes) -> str:
        """Parse folder name from IMAP LIST response.

        Falls back to the raw (decoded) response string if the expected
        quoting structure is not found.
        """
        try:
            # Format: '(\\HasNoChildren) "/" "INBOX"'
            decoded = folder_response.decode("utf-8") if isinstance(folder_response, bytes) else folder_response
            # Split on the last '" "' so folder names containing the
            # delimiter earlier in the line are not truncated.
            parts = decoded.rsplit('" "', 1)
            if len(parts) == 2:
                return parts[1].rstrip('"')
            return decoded
        except Exception:
            return str(folder_response)

    async def sync_account(
        self,
        account_id: str,
        user_id: str,
        max_emails: int = 100,
        folders: Optional[List[str]] = None,
    ) -> Tuple[int, int]:
        """
        Sync emails from an IMAP account.

        Args:
            account_id: The account ID
            user_id: The user ID
            max_emails: Maximum emails to fetch per folder (most recent)
            folders: Specific folders to sync (default: INBOX)

        Returns:
            Tuple of (new_emails, total_emails)

        Raises:
            ValueError: if the account is not found for this user.
            IMAPConnectionError: if credentials are missing or the IMAP
                session fails; the account status is set to "error" first.
        """
        import imaplib
        account = await get_email_account(account_id, user_id)
        if not account:
            raise ValueError(f"Account not found: {account_id}")
        # Get credentials
        vault_path = account.get("vault_path", "")
        creds = await self._credentials_service.get_credentials(account_id, vault_path)
        if not creds:
            await update_account_status(account_id, "error", "Credentials not found")
            raise IMAPConnectionError("Credentials not found")
        new_count = 0
        total_count = 0
        try:
            # Connect to IMAP
            if account["imap_ssl"]:
                imap = imaplib.IMAP4_SSL(account["imap_host"], account["imap_port"])
            else:
                imap = imaplib.IMAP4(account["imap_host"], account["imap_port"])
            imap.login(creds.email, creds.password)
            # Sync specified folders or just INBOX
            sync_folders = folders or ["INBOX"]
            for folder in sync_folders:
                # Per-folder failures are logged and skipped so one bad
                # folder does not abort the whole sync.
                try:
                    status, _ = imap.select(folder)
                    if status != "OK":
                        continue
                    # Search for all emails in the folder.
                    status, messages = imap.search(None, "ALL")
                    if status != "OK":
                        continue
                    message_ids = messages[0].split()
                    total_count += len(message_ids)
                    # Fetch only the most recent max_emails messages
                    # (IMAP search returns ids in ascending order).
                    recent_ids = message_ids[-max_emails:] if len(message_ids) > max_emails else message_ids
                    for msg_id in recent_ids:
                        try:
                            email_data = await self._fetch_and_store_email(
                                imap, msg_id, account_id, user_id, account["tenant_id"], folder
                            )
                            if email_data:
                                new_count += 1
                        except Exception as e:
                            logger.warning(f"Failed to fetch email {msg_id}: {e}")
                except Exception as e:
                    logger.warning(f"Failed to sync folder {folder}: {e}")
            imap.logout()
            # Update account status on success.
            await update_account_status(
                account_id,
                "active",
                email_count=total_count,
                unread_count=new_count,  # Will be recalculated
            )
            return new_count, total_count
        except Exception as e:
            # Record the failure on the account, then surface it as a
            # typed error for the caller.
            logger.error(f"Account sync failed: {e}")
            await update_account_status(account_id, "error", str(e))
            raise IMAPConnectionError(str(e))

    async def _fetch_and_store_email(
        self,
        imap,
        msg_id: bytes,
        account_id: str,
        user_id: str,
        tenant_id: str,
        folder: str,
    ) -> Optional[str]:
        """Fetch a single email and store it in the database.

        Returns the id produced by ``upsert_email``, or None when the
        fetch/parse fails (failures are logged, never raised).
        """
        try:
            status, msg_data = imap.fetch(msg_id, "(RFC822)")
            if status != "OK" or not msg_data or not msg_data[0]:
                return None
            raw_email = msg_data[0][1]
            msg = email.message_from_bytes(raw_email)
            # Parse headers; fall back to the IMAP sequence id when the
            # message carries no Message-ID.
            message_id = msg.get("Message-ID", str(msg_id))
            subject = self._decode_header(msg.get("Subject", ""))
            from_header = msg.get("From", "")
            sender_name, sender_email = parseaddr(from_header)
            sender_name = self._decode_header(sender_name)
            # Parse recipients (addresses only, display names dropped).
            to_header = msg.get("To", "")
            recipients = [addr[1] for addr in email.utils.getaddresses([to_header])]
            cc_header = msg.get("Cc", "")
            cc = [addr[1] for addr in email.utils.getaddresses([cc_header])]
            # Parse dates; an unparseable Date header falls back to "now".
            date_str = msg.get("Date")
            try:
                date_sent = parsedate_to_datetime(date_str) if date_str else datetime.now(timezone.utc)
            except Exception:
                date_sent = datetime.now(timezone.utc)
            date_received = datetime.now(timezone.utc)
            # Parse body
            body_text, body_html, attachments = self._parse_body(msg)
            # Create a short plain-text preview for inbox listings.
            body_preview = (body_text[:200] + "...") if body_text and len(body_text) > 200 else body_text
            # Snapshot of all decoded headers; duplicate header names keep
            # only the last occurrence (dict semantics).
            headers = {k: self._decode_header(v) for k, v in msg.items() if k not in ["Body"]}
            # Store in database
            email_id = await upsert_email(
                account_id=account_id,
                user_id=user_id,
                tenant_id=tenant_id,
                message_id=message_id,
                subject=subject,
                sender_email=sender_email,
                sender_name=sender_name,
                recipients=recipients,
                cc=cc,
                body_preview=body_preview,
                body_text=body_text,
                body_html=body_html,
                has_attachments=len(attachments) > 0,
                attachments=attachments,
                headers=headers,
                folder=folder,
                date_sent=date_sent,
                date_received=date_received,
            )
            return email_id
        except Exception as e:
            logger.error(f"Failed to parse email: {e}")
            return None

    def _decode_header(self, header_value: str) -> str:
        """Decode an RFC 2047 encoded header value to a readable string.

        Returns the raw value unchanged if decoding fails, and "" for
        falsy input.
        """
        if not header_value:
            return ""
        try:
            decoded = decode_header(header_value)
            return str(make_header(decoded))
        except Exception:
            return str(header_value)

    def _parse_body(self, msg) -> Tuple[Optional[str], Optional[str], List[Dict]]:
        """
        Parse email body and attachments.

        Only attachment metadata (filename, content type, size) is
        collected — attachment payloads are not stored.  For multipart
        messages the first text/plain and first text/html parts win.

        Returns:
            Tuple of (body_text, body_html, attachments)
        """
        body_text = None
        body_html = None
        attachments = []
        if msg.is_multipart():
            for part in msg.walk():
                content_type = part.get_content_type()
                content_disposition = str(part.get("Content-Disposition", ""))
                # Skip multipart containers
                if content_type.startswith("multipart/"):
                    continue
                # Check for attachments
                if "attachment" in content_disposition:
                    filename = part.get_filename()
                    if filename:
                        attachments.append({
                            "filename": self._decode_header(filename),
                            "content_type": content_type,
                            "size": len(part.get_payload(decode=True) or b""),
                        })
                    continue
                # Get body content; undecodable parts are skipped.
                try:
                    payload = part.get_payload(decode=True)
                    charset = part.get_content_charset() or "utf-8"
                    if payload:
                        text = payload.decode(charset, errors="replace")
                        if content_type == "text/plain" and not body_text:
                            body_text = text
                        elif content_type == "text/html" and not body_html:
                            body_html = text
                except Exception as e:
                    logger.debug(f"Failed to decode body part: {e}")
        else:
            # Single part message
            content_type = msg.get_content_type()
            try:
                payload = msg.get_payload(decode=True)
                charset = msg.get_content_charset() or "utf-8"
                if payload:
                    text = payload.decode(charset, errors="replace")
                    if content_type == "text/plain":
                        body_text = text
                    elif content_type == "text/html":
                        body_html = text
            except Exception as e:
                logger.debug(f"Failed to decode body: {e}")
        return body_text, body_html, attachments

    async def send_email(
        self,
        account_id: str,
        user_id: str,
        request: EmailComposeRequest,
    ) -> EmailSendResult:
        """
        Send an email via SMTP.

        Args:
            account_id: The account to send from
            user_id: The user ID
            request: The compose request with recipients and content

        Returns:
            EmailSendResult with success status; all failures (missing
            account, credential lookup, SMTP errors) are reported via the
            result's ``error`` field rather than raised.
        """
        account = await get_email_account(account_id, user_id)
        if not account:
            return EmailSendResult(success=False, error="Account not found")
        # Verify the account_id matches the one embedded in the request.
        if request.account_id != account_id:
            return EmailSendResult(success=False, error="Account mismatch")
        # Get credentials
        vault_path = account.get("vault_path", "")
        creds = await self._credentials_service.get_credentials(account_id, vault_path)
        if not creds:
            return EmailSendResult(success=False, error="Credentials not found")
        try:
            # Create message: a multipart/alternative wrapper for HTML,
            # a bare text part otherwise.
            if request.is_html:
                msg = MIMEMultipart("alternative")
                msg.attach(MIMEText(request.body, "html"))
            else:
                msg = MIMEText(request.body, "plain")
            msg["Subject"] = request.subject
            msg["From"] = account["email"]
            msg["To"] = ", ".join(request.to)
            if request.cc:
                msg["Cc"] = ", ".join(request.cc)
            # Thread the reply so mail clients can group the conversation.
            if request.reply_to_message_id:
                msg["In-Reply-To"] = request.reply_to_message_id
                msg["References"] = request.reply_to_message_id
            # Send via SMTP
            if account["smtp_ssl"]:
                smtp = smtplib.SMTP_SSL(account["smtp_host"], account["smtp_port"])
            else:
                smtp = smtplib.SMTP(account["smtp_host"], account["smtp_port"])
                # Upgrade plaintext connections before authenticating.
                smtp.starttls()
            smtp.login(creds.email, creds.password)
            # Envelope recipients include BCC, which is deliberately kept
            # out of the message headers.
            all_recipients = list(request.to)
            if request.cc:
                all_recipients.extend(request.cc)
            if request.bcc:
                all_recipients.extend(request.bcc)
            smtp.sendmail(account["email"], all_recipients, msg.as_string())
            smtp.quit()
            return EmailSendResult(
                success=True,
                message_id=msg.get("Message-ID"),
            )
        except Exception as e:
            logger.error(f"Failed to send email: {e}")
            return EmailSendResult(success=False, error=str(e))

    async def sync_all_accounts(self, user_id: str, tenant_id: Optional[str] = None) -> Dict[str, Any]:
        """
        Sync all accounts for a user.

        Accounts are synced sequentially under ``_sync_lock``; a failure
        on one account is recorded in its result entry and does not stop
        the remaining accounts.

        Returns:
            Dict with sync results per account, keyed by account id:
            {"status": "success", "new_emails": ..., "total_emails": ...}
            or {"status": "error", "error": ...}
        """
        async with self._sync_lock:
            accounts = await get_email_accounts(user_id, tenant_id)
            results = {}
            for account in accounts:
                account_id = account["id"]
                try:
                    new_count, total_count = await self.sync_account(
                        account_id, user_id, max_emails=50
                    )
                    results[account_id] = {
                        "status": "success",
                        "new_emails": new_count,
                        "total_emails": total_count,
                    }
                except Exception as e:
                    results[account_id] = {
                        "status": "error",
                        "error": str(e),
                    }
            return results

    async def get_unified_inbox_emails(
        self,
        user_id: str,
        account_ids: Optional[List[str]] = None,
        categories: Optional[List[str]] = None,
        is_read: Optional[bool] = None,
        is_starred: Optional[bool] = None,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict]:
        """
        Get unified inbox with all filters.

        Thin pass-through to ``mail_db.get_unified_inbox``; all filtering
        and pagination happens in the database layer.

        Returns:
            List of email dictionaries
        """
        return await get_unified_inbox(
            user_id=user_id,
            account_ids=account_ids,
            categories=categories,
            is_read=is_read,
            is_starred=is_starred,
            limit=limit,
            offset=offset,
        )
# Process-wide singleton, created lazily on first access.
_aggregator: Optional[MailAggregator] = None


def get_mail_aggregator() -> MailAggregator:
    """Return the shared MailAggregator, constructing it on first use."""
    global _aggregator
    if _aggregator is not None:
        return _aggregator
    _aggregator = MailAggregator()
    return _aggregator

View File

@@ -0,0 +1,747 @@
"""
AI Email Analysis Service
KI-powered email analysis with:
- Sender classification (authority recognition)
- Deadline extraction
- Category classification
- Response suggestions
"""
import os
import re
import logging
from typing import Optional, List, Dict, Any, Tuple
from datetime import datetime, timedelta
import httpx
from .models import (
EmailCategory,
SenderType,
TaskPriority,
SenderClassification,
DeadlineExtraction,
EmailAnalysisResult,
ResponseSuggestion,
KNOWN_AUTHORITIES_NI,
classify_sender_by_domain,
get_priority_from_sender_type,
)
from .mail_db import update_email_ai_analysis
logger = logging.getLogger(__name__)
# LLM Gateway configuration
LLM_GATEWAY_URL = os.getenv("LLM_GATEWAY_URL", "http://localhost:8090")
class AIEmailService:
"""
AI-powered email analysis service.
Features:
- Domain-based sender classification (fast, no LLM)
- LLM-based sender classification (fallback)
- Deadline extraction using regex + LLM
- Category classification
- Response suggestions
"""
def __init__(self):
self._http_client = None
async def get_http_client(self) -> httpx.AsyncClient:
"""Get or create HTTP client for LLM gateway."""
if self._http_client is None:
self._http_client = httpx.AsyncClient(timeout=30.0)
return self._http_client
# =========================================================================
# Sender Classification
# =========================================================================
async def classify_sender(
self,
sender_email: str,
sender_name: Optional[str] = None,
subject: Optional[str] = None,
body_preview: Optional[str] = None,
) -> SenderClassification:
"""
Classify the sender of an email.
First tries domain matching, then falls back to LLM.
Args:
sender_email: Sender's email address
sender_name: Sender's display name
subject: Email subject
body_preview: First 200 chars of body
Returns:
SenderClassification with type and confidence
"""
# Try domain-based classification first (fast, high confidence)
domain_result = classify_sender_by_domain(sender_email)
if domain_result:
return domain_result
# Fall back to LLM classification
return await self._classify_sender_llm(
sender_email, sender_name, subject, body_preview
)
async def _classify_sender_llm(
self,
sender_email: str,
sender_name: Optional[str],
subject: Optional[str],
body_preview: Optional[str],
) -> SenderClassification:
"""Classify sender using LLM."""
try:
client = await self.get_http_client()
prompt = f"""Analysiere den Absender dieser E-Mail und klassifiziere ihn:
Absender E-Mail: {sender_email}
Absender Name: {sender_name or "Nicht angegeben"}
Betreff: {subject or "Nicht angegeben"}
Vorschau: {body_preview[:200] if body_preview else "Nicht verfügbar"}
Klassifiziere den Absender in EINE der folgenden Kategorien:
- kultusministerium: Kultusministerium/Bildungsministerium
- landesschulbehoerde: Landesschulbehörde
- rlsb: Regionales Landesamt für Schule und Bildung
- schulamt: Schulamt
- nibis: Niedersächsischer Bildungsserver
- schultraeger: Schulträger/Kommune
- elternvertreter: Elternvertreter/Elternrat
- gewerkschaft: Gewerkschaft (GEW, VBE, etc.)
- fortbildungsinstitut: Fortbildungsinstitut (NLQ, etc.)
- privatperson: Privatperson
- unternehmen: Unternehmen/Firma
- unbekannt: Nicht einzuordnen
Antworte NUR mit dem Kategorienamen (z.B. "kultusministerium") und einer Konfidenz von 0.0 bis 1.0.
Format: kategorie|konfidenz|kurze_begründung
"""
response = await client.post(
f"{LLM_GATEWAY_URL}/api/v1/inference",
json={
"prompt": prompt,
"playbook": "mail_analysis",
"max_tokens": 100,
},
)
if response.status_code == 200:
data = response.json()
result_text = data.get("response", "unbekannt|0.5|")
# Parse response
parts = result_text.strip().split("|")
if len(parts) >= 2:
sender_type_str = parts[0].strip().lower()
confidence = float(parts[1].strip())
# Map to enum
type_mapping = {
"kultusministerium": SenderType.KULTUSMINISTERIUM,
"landesschulbehoerde": SenderType.LANDESSCHULBEHOERDE,
"rlsb": SenderType.RLSB,
"schulamt": SenderType.SCHULAMT,
"nibis": SenderType.NIBIS,
"schultraeger": SenderType.SCHULTRAEGER,
"elternvertreter": SenderType.ELTERNVERTRETER,
"gewerkschaft": SenderType.GEWERKSCHAFT,
"fortbildungsinstitut": SenderType.FORTBILDUNGSINSTITUT,
"privatperson": SenderType.PRIVATPERSON,
"unternehmen": SenderType.UNTERNEHMEN,
}
sender_type = type_mapping.get(sender_type_str, SenderType.UNBEKANNT)
return SenderClassification(
sender_type=sender_type,
confidence=min(max(confidence, 0.0), 1.0),
domain_matched=False,
ai_classified=True,
)
except Exception as e:
logger.warning(f"LLM sender classification failed: {e}")
# Default fallback
return SenderClassification(
sender_type=SenderType.UNBEKANNT,
confidence=0.3,
domain_matched=False,
ai_classified=False,
)
# =========================================================================
# Deadline Extraction
# =========================================================================
async def extract_deadlines(
self,
subject: str,
body_text: str,
) -> List[DeadlineExtraction]:
"""
Extract deadlines from email content.
Uses regex patterns first, then LLM for complex cases.
Args:
subject: Email subject
body_text: Email body text
Returns:
List of extracted deadlines
"""
deadlines = []
# Combine subject and body
full_text = f"{subject}\n{body_text}" if body_text else subject
# Try regex extraction first
regex_deadlines = self._extract_deadlines_regex(full_text)
deadlines.extend(regex_deadlines)
# If no regex matches, try LLM
if not deadlines and body_text:
llm_deadlines = await self._extract_deadlines_llm(subject, body_text[:1000])
deadlines.extend(llm_deadlines)
return deadlines
def _extract_deadlines_regex(self, text: str) -> List[DeadlineExtraction]:
"""Extract deadlines using regex patterns."""
deadlines = []
now = datetime.now()
# German date patterns
patterns = [
# "bis zum 15.01.2025"
(r"bis\s+(?:zum\s+)?(\d{1,2})\.(\d{1,2})\.(\d{2,4})", True),
# "spätestens am 15.01.2025"
(r"spätestens\s+(?:am\s+)?(\d{1,2})\.(\d{1,2})\.(\d{2,4})", True),
# "Abgabetermin: 15.01.2025"
(r"(?:Abgabe|Termin|Frist)[:\s]+(\d{1,2})\.(\d{1,2})\.(\d{2,4})", True),
# "innerhalb von 14 Tagen"
(r"innerhalb\s+von\s+(\d+)\s+(?:Tagen|Wochen)", False),
# "bis Ende Januar"
(r"bis\s+(?:Ende\s+)?(Januar|Februar|März|April|Mai|Juni|Juli|August|September|Oktober|November|Dezember)", False),
]
for pattern, is_specific_date in patterns:
matches = re.finditer(pattern, text, re.IGNORECASE)
for match in matches:
try:
if is_specific_date:
day = int(match.group(1))
month = int(match.group(2))
year = int(match.group(3))
# Handle 2-digit years
if year < 100:
year += 2000
deadline_date = datetime(year, month, day)
# Skip past dates
if deadline_date < now:
continue
# Get surrounding context
start = max(0, match.start() - 50)
end = min(len(text), match.end() + 50)
context = text[start:end].strip()
deadlines.append(DeadlineExtraction(
deadline_date=deadline_date,
description=f"Frist: {match.group(0)}",
confidence=0.85,
source_text=context,
is_firm=True,
))
else:
# Relative dates (innerhalb von X Tagen)
if "Tagen" in pattern or "Wochen" in pattern:
days = int(match.group(1))
if "Wochen" in match.group(0).lower():
days *= 7
deadline_date = now + timedelta(days=days)
deadlines.append(DeadlineExtraction(
deadline_date=deadline_date,
description=f"Relative Frist: {match.group(0)}",
confidence=0.7,
source_text=match.group(0),
is_firm=False,
))
except (ValueError, IndexError) as e:
logger.debug(f"Failed to parse date: {e}")
continue
return deadlines
async def _extract_deadlines_llm(
self,
subject: str,
body_preview: str,
) -> List[DeadlineExtraction]:
"""Extract deadlines using LLM."""
try:
client = await self.get_http_client()
prompt = f"""Analysiere diese E-Mail und extrahiere alle genannten Fristen und Termine:
Betreff: {subject}
Inhalt: {body_preview}
Liste alle Fristen im folgenden Format auf (eine pro Zeile):
DATUM|BESCHREIBUNG|VERBINDLICH
Beispiel: 2025-01-15|Abgabe der Berichte|ja
Wenn keine Fristen gefunden werden, antworte mit: KEINE_FRISTEN
Antworte NUR im angegebenen Format.
"""
response = await client.post(
f"{LLM_GATEWAY_URL}/api/v1/inference",
json={
"prompt": prompt,
"playbook": "mail_analysis",
"max_tokens": 200,
},
)
if response.status_code == 200:
data = response.json()
result_text = data.get("response", "")
if "KEINE_FRISTEN" in result_text:
return []
deadlines = []
for line in result_text.strip().split("\n"):
parts = line.split("|")
if len(parts) >= 2:
try:
date_str = parts[0].strip()
deadline_date = datetime.fromisoformat(date_str)
description = parts[1].strip()
is_firm = parts[2].strip().lower() == "ja" if len(parts) > 2 else True
deadlines.append(DeadlineExtraction(
deadline_date=deadline_date,
description=description,
confidence=0.7,
source_text=line,
is_firm=is_firm,
))
except (ValueError, IndexError):
continue
return deadlines
except Exception as e:
logger.warning(f"LLM deadline extraction failed: {e}")
return []
# =========================================================================
# Email Category Classification
# =========================================================================
async def classify_category(
self,
subject: str,
body_preview: str,
sender_type: SenderType,
) -> Tuple[EmailCategory, float]:
"""
Classify email into a category.
Args:
subject: Email subject
body_preview: First 200 chars of body
sender_type: Already classified sender type
Returns:
Tuple of (category, confidence)
"""
# Rule-based classification first
category, confidence = self._classify_category_rules(subject, body_preview, sender_type)
if confidence > 0.7:
return category, confidence
# Fall back to LLM
return await self._classify_category_llm(subject, body_preview)
def _classify_category_rules(
self,
subject: str,
body_preview: str,
sender_type: SenderType,
) -> Tuple[EmailCategory, float]:
"""Rule-based category classification."""
text = f"{subject} {body_preview}".lower()
# Keywords for each category
category_keywords = {
EmailCategory.DIENSTLICH: [
"dienstlich", "dienstanweisung", "erlass", "verordnung",
"bescheid", "verfügung", "ministerium", "behörde"
],
EmailCategory.PERSONAL: [
"personalrat", "stellenausschreibung", "versetzung",
"beurteilung", "dienstzeugnis", "krankmeldung", "elternzeit"
],
EmailCategory.FINANZEN: [
"budget", "haushalt", "etat", "abrechnung", "rechnung",
"erstattung", "zuschuss", "fördermittel"
],
EmailCategory.ELTERN: [
"elternbrief", "elternabend", "schulkonferenz",
"elternvertreter", "elternbeirat"
],
EmailCategory.SCHUELER: [
"schüler", "schülerin", "zeugnis", "klasse", "unterricht",
"prüfung", "klassenfahrt", "schulpflicht"
],
EmailCategory.FORTBILDUNG: [
"fortbildung", "seminar", "workshop", "schulung",
"weiterbildung", "nlq", "didaktik"
],
EmailCategory.VERANSTALTUNG: [
"einladung", "veranstaltung", "termin", "konferenz",
"sitzung", "tagung", "feier"
],
EmailCategory.SICHERHEIT: [
"sicherheit", "notfall", "brandschutz", "evakuierung",
"hygiene", "corona", "infektionsschutz"
],
EmailCategory.TECHNIK: [
"it", "software", "computer", "netzwerk", "login",
"passwort", "digitalisierung", "iserv"
],
EmailCategory.NEWSLETTER: [
"newsletter", "rundschreiben", "info-mail", "mitteilung"
],
EmailCategory.WERBUNG: [
"angebot", "rabatt", "aktion", "werbung", "abonnement"
],
}
best_category = EmailCategory.SONSTIGES
best_score = 0.0
for category, keywords in category_keywords.items():
score = sum(1 for kw in keywords if kw in text)
if score > best_score:
best_score = score
best_category = category
# Adjust based on sender type
if sender_type in [SenderType.KULTUSMINISTERIUM, SenderType.LANDESSCHULBEHOERDE, SenderType.RLSB]:
if best_category == EmailCategory.SONSTIGES:
best_category = EmailCategory.DIENSTLICH
best_score = 2
# Convert score to confidence
confidence = min(0.9, 0.4 + (best_score * 0.15))
return best_category, confidence
async def _classify_category_llm(
self,
subject: str,
body_preview: str,
) -> Tuple[EmailCategory, float]:
"""LLM-based category classification."""
try:
client = await self.get_http_client()
categories = ", ".join([c.value for c in EmailCategory])
prompt = f"""Klassifiziere diese E-Mail in EINE Kategorie:
Betreff: {subject}
Inhalt: {body_preview[:500]}
Kategorien: {categories}
Antworte NUR mit dem Kategorienamen und einer Konfidenz (0.0-1.0):
Format: kategorie|konfidenz
"""
response = await client.post(
f"{LLM_GATEWAY_URL}/api/v1/inference",
json={
"prompt": prompt,
"playbook": "mail_analysis",
"max_tokens": 50,
},
)
if response.status_code == 200:
data = response.json()
result = data.get("response", "sonstiges|0.5")
parts = result.strip().split("|")
if len(parts) >= 2:
category_str = parts[0].strip().lower()
confidence = float(parts[1].strip())
try:
category = EmailCategory(category_str)
return category, min(max(confidence, 0.0), 1.0)
except ValueError:
pass
except Exception as e:
logger.warning(f"LLM category classification failed: {e}")
return EmailCategory.SONSTIGES, 0.5
# =========================================================================
# Full Analysis Pipeline
# =========================================================================
async def analyze_email(
self,
email_id: str,
sender_email: str,
sender_name: Optional[str],
subject: str,
body_text: Optional[str],
body_preview: Optional[str],
) -> EmailAnalysisResult:
"""
Run full analysis pipeline on an email.
Args:
email_id: Database ID of the email
sender_email: Sender's email address
sender_name: Sender's display name
subject: Email subject
body_text: Full body text
body_preview: Preview text
Returns:
Complete analysis result
"""
# 1. Classify sender
sender_classification = await self.classify_sender(
sender_email, sender_name, subject, body_preview
)
# 2. Extract deadlines
deadlines = await self.extract_deadlines(subject, body_text or "")
# 3. Classify category
category, category_confidence = await self.classify_category(
subject, body_preview or "", sender_classification.sender_type
)
# 4. Determine priority
suggested_priority = get_priority_from_sender_type(sender_classification.sender_type)
# Upgrade priority if deadlines are found
if deadlines:
nearest_deadline = min(d.deadline_date for d in deadlines)
days_until = (nearest_deadline - datetime.now()).days
if days_until <= 1:
suggested_priority = TaskPriority.URGENT
elif days_until <= 3:
suggested_priority = TaskPriority.HIGH
elif days_until <= 7:
suggested_priority = max(suggested_priority, TaskPriority.MEDIUM)
# 5. Generate summary (optional, can be expensive)
summary = None # Could add LLM summary generation here
# 6. Determine if task should be auto-created
auto_create_task = (
len(deadlines) > 0 or
sender_classification.sender_type in [
SenderType.KULTUSMINISTERIUM,
SenderType.LANDESSCHULBEHOERDE,
SenderType.RLSB,
]
)
# 7. Store analysis in database
await update_email_ai_analysis(
email_id=email_id,
category=category.value,
sender_type=sender_classification.sender_type.value,
sender_authority_name=sender_classification.authority_name,
detected_deadlines=[
{
"date": d.deadline_date.isoformat(),
"description": d.description,
"is_firm": d.is_firm,
}
for d in deadlines
],
suggested_priority=suggested_priority.value,
ai_summary=summary,
)
return EmailAnalysisResult(
email_id=email_id,
category=category,
category_confidence=category_confidence,
sender_classification=sender_classification,
deadlines=deadlines,
suggested_priority=suggested_priority,
summary=summary,
suggested_actions=[],
auto_create_task=auto_create_task,
)
# =========================================================================
# Response Suggestions
# =========================================================================
async def suggest_response(
    self,
    subject: str,
    body_text: str,
    sender_type: SenderType,
    category: EmailCategory,
) -> List[ResponseSuggestion]:
    """
    Generate response suggestions for an email.

    Combines static German reply templates (selected by sender type and
    category) with an optional LLM-generated draft appended last.

    Args:
        subject: Original email subject
        body_text: Original email body
        sender_type: Classified sender type
        category: Classified category

    Returns:
        List of response suggestions (possibly empty). Template-based
        entries come first, the AI-generated draft last.
    """
    suggestions = []
    # Add standard templates based on sender type and category
    if sender_type in [SenderType.KULTUSMINISTERIUM, SenderType.LANDESSCHULBEHOERDE, SenderType.RLSB]:
        # Formal acknowledgment template for education-authority senders.
        suggestions.append(ResponseSuggestion(
            template_type="acknowledgment",
            subject=f"Re: {subject}",
            body="""Sehr geehrte Damen und Herren,
vielen Dank für Ihre Nachricht.
Ich bestätige den Eingang und werde die Angelegenheit fristgerecht bearbeiten.
Mit freundlichen Grüßen""",
            confidence=0.8,
        ))
    if category == EmailCategory.ELTERN:
        # Fill-in-the-blank scaffold for replies to parents.
        suggestions.append(ResponseSuggestion(
            template_type="parent_response",
            subject=f"Re: {subject}",
            body="""Liebe Eltern,
vielen Dank für Ihre Nachricht.
[Ihre Antwort hier]
Mit freundlichen Grüßen""",
            confidence=0.7,
        ))
    # Add LLM-generated suggestion; only the first 500 chars of the body
    # are forwarded to keep the prompt small.
    try:
        llm_suggestion = await self._generate_response_llm(subject, body_text[:500], sender_type)
        if llm_suggestion:
            suggestions.append(llm_suggestion)
    except Exception as e:
        # The AI draft is best-effort: on failure the template suggestions
        # are still returned.
        logger.warning(f"LLM response generation failed: {e}")
    return suggestions
async def _generate_response_llm(
    self,
    subject: str,
    body_preview: str,
    sender_type: SenderType,
) -> Optional[ResponseSuggestion]:
    """Generate a response suggestion using LLM.

    Returns None on any failure (gateway error, non-200 status, empty
    completion) — callers treat the AI draft as optional.
    """
    try:
        client = await self.get_http_client()
        # German phrase describing the sender, interpolated into the prompt;
        # unknown sender types fall back to a generic description.
        sender_desc = {
            SenderType.KULTUSMINISTERIUM: "dem Kultusministerium",
            SenderType.LANDESSCHULBEHOERDE: "der Landesschulbehörde",
            SenderType.RLSB: "dem RLSB",
            SenderType.ELTERNVERTRETER: "einem Elternvertreter",
        }.get(sender_type, "einem Absender")
        prompt = f"""Du bist eine Schulleiterin in Niedersachsen. Formuliere eine professionelle, kurze Antwort auf diese E-Mail von {sender_desc}:
Betreff: {subject}
Inhalt: {body_preview}
Die Antwort sollte:
- Höflich und formell sein
- Den Eingang bestätigen
- Eine konkrete nächste Aktion nennen oder um Klärung bitten
Antworte NUR mit dem Antworttext (ohne Betreffzeile, ohne "Betreff:").
"""
        response = await client.post(
            f"{LLM_GATEWAY_URL}/api/v1/inference",
            json={
                "prompt": prompt,
                "playbook": "mail_analysis",
                "max_tokens": 300,
            },
        )
        if response.status_code == 200:
            data = response.json()
            body = data.get("response", "").strip()
            if body:
                # Lower confidence than the curated templates (0.7/0.8).
                return ResponseSuggestion(
                    template_type="ai_generated",
                    subject=f"Re: {subject}",
                    body=body,
                    confidence=0.6,
                )
    except Exception as e:
        logger.warning(f"LLM response generation failed: {e}")
    return None
# Module-level singleton holder for the AI email service.
_ai_service: Optional[AIEmailService] = None


def get_ai_email_service() -> AIEmailService:
    """Return the shared AIEmailService, constructing it on first call."""
    global _ai_service
    if _ai_service is not None:
        return _ai_service
    _ai_service = AIEmailService()
    return _ai_service

View File

@@ -0,0 +1,651 @@
"""
Unified Inbox Mail API
FastAPI router for the mail system.
"""
import logging
from typing import Optional, List
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from pydantic import BaseModel
from .models import (
EmailAccountCreate,
EmailAccountUpdate,
EmailAccount,
AccountTestResult,
AggregatedEmail,
EmailSearchParams,
TaskCreate,
TaskUpdate,
InboxTask,
TaskDashboardStats,
EmailComposeRequest,
EmailSendResult,
MailStats,
MailHealthCheck,
EmailAnalysisResult,
ResponseSuggestion,
TaskStatus,
TaskPriority,
EmailCategory,
)
from .mail_db import (
init_mail_tables,
create_email_account,
get_email_accounts,
get_email_account,
delete_email_account,
get_unified_inbox,
get_email,
mark_email_read,
mark_email_starred,
get_mail_stats,
log_mail_audit,
)
from .credentials import get_credentials_service
from .aggregator import get_mail_aggregator
from .ai_service import get_ai_email_service
from .task_service import get_task_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/mail", tags=["Mail"])
# =============================================================================
# Health & Init
# =============================================================================
@router.get("/health", response_model=MailHealthCheck)
async def health_check():
    """Liveness probe for the mail system.

    Currently returns optimistic static values; real connectivity probes
    are still outstanding.
    """
    # TODO: probe the database and Vault instead of reporting static values.
    report = MailHealthCheck(
        status="healthy",
        database_connected=True,
        vault_connected=True,
    )
    return report
@router.post("/init")
async def initialize_mail_system():
    """Create the mail database tables; responds 500 if the DDL fails."""
    if not await init_mail_tables():
        raise HTTPException(status_code=500, detail="Failed to initialize mail tables")
    return {"status": "initialized"}
# =============================================================================
# Account Management
# =============================================================================
class AccountCreateRequest(BaseModel):
    """Request to create an email account.

    The plaintext ``password`` is only held transiently: the create endpoint
    hands it to the credentials service for secure storage and persists only
    the returned storage reference on the account row.
    """
    email: str
    display_name: str
    # Free-form account kind; stored as-is (default "personal").
    account_type: str = "personal"
    # IMAP (receiving) endpoint; 993 is the standard IMAPS port.
    imap_host: str
    imap_port: int = 993
    imap_ssl: bool = True
    # SMTP (sending) endpoint; 465 is the standard SMTPS port.
    smtp_host: str
    smtp_port: int = 465
    smtp_ssl: bool = True
    password: str
@router.post("/accounts", response_model=dict)
async def create_account(
    request: AccountCreateRequest,
    user_id: str = Query(..., description="User ID"),
    tenant_id: str = Query(..., description="Tenant ID"),
):
    """Create a new email account.

    Stores the password via the credentials service first, then persists
    the account row referencing only the resulting vault path, and finally
    writes an audit-log entry.
    """
    credentials_service = get_credentials_service()
    # Store credentials securely; only the returned reference is persisted.
    vault_path = await credentials_service.store_credentials(
        account_id=f"{user_id}_{request.email}",
        email=request.email,
        password=request.password,
        imap_host=request.imap_host,
        imap_port=request.imap_port,
        smtp_host=request.smtp_host,
        smtp_port=request.smtp_port,
    )
    # Create account in database
    account_id = await create_email_account(
        user_id=user_id,
        tenant_id=tenant_id,
        email=request.email,
        display_name=request.display_name,
        account_type=request.account_type,
        imap_host=request.imap_host,
        imap_port=request.imap_port,
        imap_ssl=request.imap_ssl,
        smtp_host=request.smtp_host,
        smtp_port=request.smtp_port,
        smtp_ssl=request.smtp_ssl,
        vault_path=vault_path,
    )
    if not account_id:
        # NOTE(review): the already-stored credentials are not cleaned up on
        # this failure path — consider deleting them to avoid orphaned secrets.
        raise HTTPException(status_code=500, detail="Failed to create account")
    # Log audit
    await log_mail_audit(
        user_id=user_id,
        action="account_created",
        entity_type="account",
        entity_id=account_id,
        details={"email": request.email},
        tenant_id=tenant_id,
    )
    return {"id": account_id, "status": "created"}
@router.get("/accounts", response_model=List[dict])
async def list_accounts(
    user_id: str = Query(..., description="User ID"),
    tenant_id: Optional[str] = Query(None, description="Tenant ID"),
):
    """List the user's email accounts with secret references stripped."""
    accounts = await get_email_accounts(user_id, tenant_id)
    # Never leak the credential-store reference to clients.
    for entry in accounts:
        entry.pop("vault_path", None)
    return accounts
@router.get("/accounts/{account_id}", response_model=dict)
async def get_account(
    account_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Fetch one account owned by the user, minus its credential reference."""
    record = await get_email_account(account_id, user_id)
    if not record:
        raise HTTPException(status_code=404, detail="Account not found")
    # Strip the secret-store pointer before returning to the client.
    record.pop("vault_path", None)
    return record
@router.delete("/accounts/{account_id}")
async def remove_account(
    account_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Delete an email account.

    Order of side effects: credentials are removed from the secret store
    first, then the DB row (which cascades to the account's emails), and
    finally the deletion is audit-logged.
    """
    account = await get_email_account(account_id, user_id)
    if not account:
        raise HTTPException(status_code=404, detail="Account not found")
    # Delete credentials; a missing vault_path simply skips this step.
    credentials_service = get_credentials_service()
    vault_path = account.get("vault_path", "")
    if vault_path:
        await credentials_service.delete_credentials(account_id, vault_path)
    # Delete from database (cascades to emails)
    success = await delete_email_account(account_id, user_id)
    if not success:
        raise HTTPException(status_code=500, detail="Failed to delete account")
    await log_mail_audit(
        user_id=user_id,
        action="account_deleted",
        entity_type="account",
        entity_id=account_id,
    )
    return {"status": "deleted"}
@router.post("/accounts/{account_id}/test", response_model=AccountTestResult)
async def test_account_connection(
    account_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Test connection for an existing, already-stored email account.

    Missing credentials are reported as a failed AccountTestResult rather
    than an HTTP error; an unknown account is a 404.
    """
    account = await get_email_account(account_id, user_id)
    if not account:
        raise HTTPException(status_code=404, detail="Account not found")
    # Get credentials from the secret store referenced by the account row.
    credentials_service = get_credentials_service()
    vault_path = account.get("vault_path", "")
    creds = await credentials_service.get_credentials(account_id, vault_path)
    if not creds:
        return AccountTestResult(
            success=False,
            error_message="Credentials not found"
        )
    # Test connection using server settings from the DB row and the
    # decrypted login from the secret store.
    aggregator = get_mail_aggregator()
    result = await aggregator.test_account_connection(
        imap_host=account["imap_host"],
        imap_port=account["imap_port"],
        imap_ssl=account["imap_ssl"],
        smtp_host=account["smtp_host"],
        smtp_port=account["smtp_port"],
        smtp_ssl=account["smtp_ssl"],
        email_address=creds.email,
        password=creds.password,
    )
    return result
class ConnectionTestRequest(BaseModel):
    """Request to test connection before saving account.

    Mirrors AccountCreateRequest's server fields but carries no display or
    account-type metadata — nothing from this request is persisted.
    """
    email: str
    imap_host: str
    imap_port: int = 993
    imap_ssl: bool = True
    smtp_host: str
    smtp_port: int = 465
    smtp_ssl: bool = True
    password: str
@router.post("/accounts/test-connection", response_model=AccountTestResult)
async def test_connection_before_save(request: ConnectionTestRequest):
    """
    Test IMAP/SMTP connection before saving an account.

    Lets the setup wizard verify credentials are correct prior to
    persisting anything in the database.
    """
    probe = get_mail_aggregator()
    return await probe.test_account_connection(
        imap_host=request.imap_host,
        imap_port=request.imap_port,
        imap_ssl=request.imap_ssl,
        smtp_host=request.smtp_host,
        smtp_port=request.smtp_port,
        smtp_ssl=request.smtp_ssl,
        email_address=request.email,
        password=request.password,
    )
@router.post("/accounts/{account_id}/sync")
async def sync_account(
    account_id: str,
    user_id: str = Query(..., description="User ID"),
    max_emails: int = Query(100, ge=1, le=500),
    background_tasks: BackgroundTasks = None,
):
    """Fetch new messages for one account (runs inline, not in background).

    NOTE(review): ``background_tasks`` is currently unused — kept for
    interface stability; confirm whether background syncing was intended.

    Raises:
        HTTPException: 500 with the underlying error message on any sync
            failure; the original traceback is preserved via chaining.
    """
    aggregator = get_mail_aggregator()
    try:
        new_count, total_count = await aggregator.sync_account(
            account_id=account_id,
            user_id=user_id,
            max_emails=max_emails,
        )
    except Exception as e:
        # Previously the traceback was silently discarded; log it and keep
        # the exception chain for debuggability.
        logger.exception("Mail sync failed for account %s", account_id)
        raise HTTPException(status_code=500, detail=str(e)) from e
    return {
        "status": "synced",
        "new_emails": new_count,
        "total_emails": total_count,
    }
# =============================================================================
# Unified Inbox
# =============================================================================
@router.get("/inbox", response_model=List[dict])
async def get_inbox(
    user_id: str = Query(..., description="User ID"),
    account_ids: Optional[str] = Query(None, description="Comma-separated account IDs"),
    categories: Optional[str] = Query(None, description="Comma-separated categories"),
    is_read: Optional[bool] = Query(None),
    is_starred: Optional[bool] = Query(None),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
):
    """Return the merged inbox across all of the user's accounts."""
    # Comma-separated query params arrive as plain strings; an empty or
    # absent value means "no filter" (None).
    parsed_accounts = None if not account_ids else account_ids.split(",")
    parsed_categories = None if not categories else categories.split(",")
    return await get_unified_inbox(
        user_id=user_id,
        account_ids=parsed_accounts,
        categories=parsed_categories,
        is_read=is_read,
        is_starred=is_starred,
        limit=limit,
        offset=offset,
    )
@router.get("/inbox/{email_id}", response_model=dict)
async def get_email_detail(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Fetch one email in full, marking it read as a side effect."""
    record = await get_email(email_id, user_id)
    if not record:
        raise HTTPException(status_code=404, detail="Email not found")
    # Opening an email implicitly marks it as read.
    await mark_email_read(email_id, user_id, is_read=True)
    return record
@router.post("/inbox/{email_id}/read")
async def mark_read(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
    is_read: bool = Query(True),
):
    """Toggle an email's read flag; pass is_read=false to mark unread."""
    if not await mark_email_read(email_id, user_id, is_read):
        raise HTTPException(status_code=500, detail="Failed to update email")
    return {"status": "updated", "is_read": is_read}
@router.post("/inbox/{email_id}/star")
async def mark_starred(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
    is_starred: bool = Query(True),
):
    """Toggle an email's starred flag; pass is_starred=false to unstar."""
    if not await mark_email_starred(email_id, user_id, is_starred):
        raise HTTPException(status_code=500, detail="Failed to update email")
    return {"status": "updated", "is_starred": is_starred}
# =============================================================================
# Send Email
# =============================================================================
@router.post("/send", response_model=EmailSendResult)
async def send_email(
    request: EmailComposeRequest,
    user_id: str = Query(..., description="User ID"),
):
    """Send an email via one of the user's configured accounts.

    Failures are reported inside the returned EmailSendResult rather than
    raised; the send is audit-logged only on success.
    """
    aggregator = get_mail_aggregator()
    result = await aggregator.send_email(
        account_id=request.account_id,
        user_id=user_id,
        request=request,
    )
    if result.success:
        # Audit only recipients/subject metadata, never the message body.
        await log_mail_audit(
            user_id=user_id,
            action="email_sent",
            entity_type="email",
            details={
                "account_id": request.account_id,
                "to": request.to,
                "subject": request.subject,
            },
        )
    return result
# =============================================================================
# AI Analysis
# =============================================================================
@router.post("/analyze/{email_id}", response_model=EmailAnalysisResult)
async def analyze_email(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Run AI analysis on an email.

    404s when the email is unknown (or owned by another user), then
    delegates classification and deadline extraction to the AI service.
    """
    email_data = await get_email(email_id, user_id)
    if not email_data:
        raise HTTPException(status_code=404, detail="Email not found")
    ai_service = get_ai_email_service()
    # Missing text fields pass through as ""/None — presumably the service
    # copes with partial content; confirm against AIEmailService.analyze_email.
    result = await ai_service.analyze_email(
        email_id=email_id,
        sender_email=email_data.get("sender_email", ""),
        sender_name=email_data.get("sender_name"),
        subject=email_data.get("subject", ""),
        body_text=email_data.get("body_text"),
        body_preview=email_data.get("body_preview"),
    )
    return result
@router.get("/suggestions/{email_id}", response_model=List[ResponseSuggestion])
async def get_response_suggestions(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Get AI-generated response suggestions for an email.

    Falls back to the neutral classifications ("unbekannt"/"sonstiges")
    when the stored analysis is missing, None, or holds a value no longer
    present in the enums — previously those cases raised ValueError and
    surfaced as HTTP 500.
    """
    email_data = await get_email(email_id, user_id)
    if not email_data:
        raise HTTPException(status_code=404, detail="Email not found")
    ai_service = get_ai_email_service()
    # Use stored analysis if available
    from .models import SenderType, EmailCategory as EC
    # `.get(key, default)` does not guard against an explicit None value in
    # the row, and stale rows may hold removed enum members — handle both.
    try:
        sender_type = SenderType(email_data.get("sender_type") or "unbekannt")
    except ValueError:
        sender_type = SenderType("unbekannt")
    try:
        category = EC(email_data.get("category") or "sonstiges")
    except ValueError:
        category = EC("sonstiges")
    suggestions = await ai_service.suggest_response(
        subject=email_data.get("subject", ""),
        body_text=email_data.get("body_text", ""),
        sender_type=sender_type,
        category=category,
    )
    return suggestions
# =============================================================================
# Tasks (Arbeitsvorrat)
# =============================================================================
@router.get("/tasks", response_model=List[dict])
async def list_tasks(
    user_id: str = Query(..., description="User ID"),
    status: Optional[str] = Query(None, description="Filter by status"),
    priority: Optional[str] = Query(None, description="Filter by priority"),
    include_completed: bool = Query(False),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
):
    """Get all tasks for a user.

    An invalid ``status`` or ``priority`` filter value is a client error
    (422) — previously the raw ValueError escaped and produced an HTTP 500.
    """
    task_service = get_task_service()
    try:
        status_enum = TaskStatus(status) if status else None
        priority_enum = TaskPriority(priority) if priority else None
    except ValueError as e:
        raise HTTPException(status_code=422, detail=f"Invalid filter value: {e}") from e
    tasks = await task_service.get_user_tasks(
        user_id=user_id,
        status=status_enum,
        priority=priority_enum,
        include_completed=include_completed,
        limit=limit,
        offset=offset,
    )
    return tasks
@router.post("/tasks", response_model=dict)
async def create_task(
    request: TaskCreate,
    user_id: str = Query(..., description="User ID"),
    tenant_id: str = Query(..., description="Tenant ID"),
):
    """Create a task by hand (not derived from an email)."""
    new_id = await get_task_service().create_manual_task(
        user_id=user_id,
        tenant_id=tenant_id,
        task_data=request,
    )
    if not new_id:
        raise HTTPException(status_code=500, detail="Failed to create task")
    return {"id": new_id, "status": "created"}
@router.get("/tasks/dashboard", response_model=TaskDashboardStats)
async def get_task_dashboard(
    user_id: str = Query(..., description="User ID"),
):
    """Aggregate task statistics for the user's dashboard."""
    return await get_task_service().get_dashboard_stats(user_id)
@router.get("/tasks/{task_id}", response_model=dict)
async def get_task(
    task_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Fetch a single task owned by the user; 404 if unknown."""
    found = await get_task_service().get_task(task_id, user_id)
    if not found:
        raise HTTPException(status_code=404, detail="Task not found")
    return found
@router.put("/tasks/{task_id}")
async def update_task(
    task_id: str,
    request: TaskUpdate,
    user_id: str = Query(..., description="User ID"),
):
    """Apply a partial update to a task.

    NOTE(review): a missing task and a genuine persistence failure are
    indistinguishable here — both surface as HTTP 500. Consider a prior
    existence check for a proper 404.
    """
    task_service = get_task_service()
    success = await task_service.update_task(task_id, user_id, request)
    if not success:
        raise HTTPException(status_code=500, detail="Failed to update task")
    return {"status": "updated"}
@router.post("/tasks/{task_id}/complete")
async def complete_task(
    task_id: str,
    user_id: str = Query(..., description="User ID"),
):
    """Mark a task as completed."""
    if not await get_task_service().mark_completed(task_id, user_id):
        raise HTTPException(status_code=500, detail="Failed to complete task")
    return {"status": "completed"}
@router.post("/tasks/from-email/{email_id}")
async def create_task_from_email(
    email_id: str,
    user_id: str = Query(..., description="User ID"),
    tenant_id: str = Query(..., description="Tenant ID"),
):
    """Create a task from an email (after analysis).

    Rehydrates deadlines stored by a prior /analyze run into
    DeadlineExtraction objects; malformed or stale stored values are
    skipped rather than failing the whole request.
    """
    email_data = await get_email(email_id, user_id)
    if not email_data:
        raise HTTPException(status_code=404, detail="Email not found")
    # Get deadlines from stored analysis (list of {"date", "description",
    # "is_firm"} dicts as written by update_email_ai_analysis).
    deadlines_raw = email_data.get("detected_deadlines", [])
    from .models import DeadlineExtraction, SenderType
    deadlines = []
    for d in deadlines_raw:
        try:
            deadlines.append(DeadlineExtraction(
                deadline_date=datetime.fromisoformat(d["date"]),
                description=d.get("description", "Frist"),
                # Fixed confidence for rehydrated entries; source_text is
                # not stored, so it is left empty.
                confidence=0.8,
                source_text="",
                is_firm=d.get("is_firm", True),
            ))
        except (KeyError, ValueError):
            # Skip entries missing "date" or holding a non-ISO timestamp.
            continue
    sender_type = None
    if email_data.get("sender_type"):
        try:
            sender_type = SenderType(email_data["sender_type"])
        except ValueError:
            # Stored value no longer matches the enum; treat as unknown.
            pass
    task_service = get_task_service()
    task_id = await task_service.create_task_from_email(
        user_id=user_id,
        tenant_id=tenant_id,
        email_id=email_id,
        deadlines=deadlines,
        sender_type=sender_type,
    )
    if not task_id:
        raise HTTPException(status_code=500, detail="Failed to create task")
    return {"id": task_id, "status": "created"}
# =============================================================================
# Statistics
# =============================================================================
@router.get("/stats", response_model=MailStats)
async def get_statistics(
    user_id: str = Query(..., description="User ID"),
):
    """Return overall mail statistics for the user."""
    raw = await get_mail_stats(user_id)
    return MailStats(**raw)
# =============================================================================
# Sync All
# =============================================================================
@router.post("/sync-all")
async def sync_all_accounts(
    user_id: str = Query(..., description="User ID"),
    tenant_id: Optional[str] = Query(None),
):
    """Trigger a sync of every email account the user owns."""
    outcome = await get_mail_aggregator().sync_all_accounts(user_id, tenant_id)
    return {"status": "synced", "results": outcome}

View File

@@ -0,0 +1,373 @@
"""
Mail Credentials Service
Secure storage and retrieval of email account credentials using HashiCorp Vault.
Falls back to encrypted database storage in development.
"""
import os
import base64
import hashlib
import logging
from typing import Optional, Dict
from dataclasses import dataclass
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
logger = logging.getLogger(__name__)
# Environment
# NOTE(review): ENVIRONMENT is read but not referenced elsewhere in this
# module — presumably consumed by other modules; confirm before removing.
ENVIRONMENT = os.getenv("ENVIRONMENT", "development")
# When set, Vault is tried first; otherwise the encrypted-file fallback is used.
VAULT_ADDR = os.getenv("VAULT_ADDR", "")
# Directory for the encrypted-file fallback; defaults to
# <package parent>/data/mail_credentials next to this package.
MAIL_CREDENTIALS_DIR = os.getenv("MAIL_CREDENTIALS_DIR", os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "data", "mail_credentials"))
@dataclass
class MailCredentials:
    """Decrypted mail account credentials as handed to IMAP/SMTP clients."""
    email: str
    # Plaintext password — keep in memory only; never log or persist directly.
    password: str
    imap_host: str
    imap_port: int
    smtp_host: str
    smtp_port: int
class MailCredentialsService:
    """
    Service for storing and retrieving mail credentials securely.

    In production: Uses HashiCorp Vault KV v2.
    In development (or when Vault is unreachable/misconfigured): falls back
    to Fernet-encrypted files under MAIL_CREDENTIALS_DIR, with the key
    derived from MAIL_ENCRYPTION_SECRET via PBKDF2.

    The string returned by store_credentials() ("vault:<path>" or
    "file:<path>") is the reference callers must pass back for retrieval
    and deletion.
    """

    def __init__(self):
        self._vault_client = None
        self._vault_available = False
        self._encryption_key = None
        # Try to initialize Vault; any failure downgrades to encryption.
        if VAULT_ADDR:
            self._init_vault()
        else:
            # Development fallback: use encryption key
            self._init_encryption()

    def _init_vault(self):
        """Initialize Vault client for credential storage.

        Downgrades to the encrypted-file fallback if hvac is not installed,
        VAULT_TOKEN is missing, or authentication fails.
        """
        try:
            import hvac
            vault_token = os.getenv("VAULT_TOKEN")
            if not vault_token:
                logger.warning("VAULT_ADDR set but no VAULT_TOKEN - Vault disabled")
                self._init_encryption()
                return
            self._vault_client = hvac.Client(
                url=VAULT_ADDR,
                token=vault_token,
            )
            if self._vault_client.is_authenticated():
                self._vault_available = True
                logger.info("Mail credentials service: Vault initialized")
            else:
                logger.warning("Vault authentication failed - using encryption fallback")
                self._init_encryption()
        except ImportError:
            logger.warning("hvac not installed - using encryption fallback")
            self._init_encryption()
        except Exception as e:
            logger.warning(f"Vault initialization failed: {e}")
            self._init_encryption()

    def _init_encryption(self):
        """Initialize encryption for development/fallback mode.

        Raises:
            RuntimeError: if MAIL_ENCRYPTION_SECRET is not set — the service
                deliberately refuses to run with a hardcoded default key.
        """
        # Derive key from environment secret - REQUIRED
        secret = os.getenv("MAIL_ENCRYPTION_SECRET")
        if not secret:
            raise RuntimeError("MAIL_ENCRYPTION_SECRET nicht konfiguriert - bitte via Vault oder Umgebungsvariable setzen")
        # NOTE(review): the salt has a static default shared by deployments
        # that do not set MAIL_ENCRYPTION_SALT — confirm this is intentional.
        salt = os.getenv("MAIL_ENCRYPTION_SALT", "breakpilot-mail-salt").encode()
        # PBKDF2-HMAC-SHA256 with a high iteration count to slow brute force.
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt,
            iterations=480000,
        )
        # Fernet requires a urlsafe-base64-encoded 32-byte key.
        key = base64.urlsafe_b64encode(kdf.derive(secret.encode()))
        self._encryption_key = Fernet(key)
        logger.info("Mail credentials service: Using encrypted storage")

    def _get_vault_path(self, account_id: str) -> str:
        """Generate Vault path for a mail account."""
        return f"breakpilot/mail/accounts/{account_id}"

    async def store_credentials(
        self,
        account_id: str,
        email: str,
        password: str,
        imap_host: str,
        imap_port: int,
        smtp_host: str,
        smtp_port: int,
    ) -> str:
        """
        Store mail credentials securely.

        Returns:
            vault_path: Reference to the stored secret, prefixed with
                "vault:" or "file:" depending on the active backend.
        """
        if self._vault_available:
            return await self._store_in_vault(
                account_id, email, password, imap_host, imap_port, smtp_host, smtp_port
            )
        else:
            return await self._store_encrypted(
                account_id, email, password, imap_host, imap_port, smtp_host, smtp_port
            )

    async def _store_in_vault(
        self,
        account_id: str,
        email: str,
        password: str,
        imap_host: str,
        imap_port: int,
        smtp_host: str,
        smtp_port: int,
    ) -> str:
        """Store credentials in Vault KV v2.

        Raises:
            Exception: re-raises any Vault client error after logging it.
        """
        path = self._get_vault_path(account_id)
        try:
            # Ports are serialized as strings; _get_from_vault converts back.
            self._vault_client.secrets.kv.v2.create_or_update_secret(
                path=path,
                secret={
                    "email": email,
                    "password": password,
                    "imap_host": imap_host,
                    "imap_port": str(imap_port),
                    "smtp_host": smtp_host,
                    "smtp_port": str(smtp_port),
                },
                mount_point="secret",
            )
            logger.info(f"Stored credentials in Vault for account {account_id}")
            return f"vault:{path}"
        except Exception as e:
            logger.error(f"Failed to store credentials in Vault: {e}")
            raise

    async def _store_encrypted(
        self,
        account_id: str,
        email: str,
        password: str,
        imap_host: str,
        imap_port: int,
        smtp_host: str,
        smtp_port: int,
    ) -> str:
        """Store credentials encrypted (development fallback)."""
        import json
        credentials = {
            "email": email,
            "password": password,
            "imap_host": imap_host,
            "imap_port": imap_port,
            "smtp_host": smtp_host,
            "smtp_port": smtp_port,
        }
        # Encrypt the credentials
        encrypted = self._encryption_key.encrypt(json.dumps(credentials).encode())
        # Store in file (development only)
        os.makedirs(MAIL_CREDENTIALS_DIR, exist_ok=True)
        path = f"{MAIL_CREDENTIALS_DIR}/{account_id}.enc"
        with open(path, "wb") as f:
            f.write(encrypted)
        logger.info(f"Stored encrypted credentials for account {account_id}")
        return f"file:{path}"

    async def get_credentials(self, account_id: str, vault_path: str) -> Optional[MailCredentials]:
        """
        Retrieve mail credentials.

        Args:
            account_id: The account ID
            vault_path: The storage path (from store_credentials)

        Returns:
            MailCredentials or None if not found
        """
        # Dispatch on the backend prefix written by store_credentials.
        if vault_path.startswith("vault:"):
            return await self._get_from_vault(vault_path[6:])
        elif vault_path.startswith("file:"):
            return await self._get_from_file(vault_path[5:])
        else:
            # Legacy path format (no prefix): assume Vault.
            return await self._get_from_vault(vault_path)

    async def _get_from_vault(self, path: str) -> Optional[MailCredentials]:
        """Retrieve credentials from Vault; None when unavailable or on error."""
        if not self._vault_available:
            logger.warning("Vault not available for credential retrieval")
            return None
        try:
            response = self._vault_client.secrets.kv.v2.read_secret_version(
                path=path,
                mount_point="secret",
            )
            # KV v2 wraps the payload twice: response["data"]["data"].
            if response and "data" in response and "data" in response["data"]:
                data = response["data"]["data"]
                return MailCredentials(
                    email=data["email"],
                    password=data["password"],
                    imap_host=data["imap_host"],
                    imap_port=int(data["imap_port"]),
                    smtp_host=data["smtp_host"],
                    smtp_port=int(data["smtp_port"]),
                )
        except Exception as e:
            logger.error(f"Failed to retrieve credentials from Vault: {e}")
        return None

    async def _get_from_file(self, path: str) -> Optional[MailCredentials]:
        """Retrieve credentials from encrypted file (development)."""
        import json
        try:
            with open(path, "rb") as f:
                encrypted = f.read()
            decrypted = self._encryption_key.decrypt(encrypted)
            data = json.loads(decrypted.decode())
            return MailCredentials(
                email=data["email"],
                password=data["password"],
                imap_host=data["imap_host"],
                imap_port=data["imap_port"],
                smtp_host=data["smtp_host"],
                smtp_port=data["smtp_port"],
            )
        except FileNotFoundError:
            logger.warning(f"Credentials file not found: {path}")
        except Exception as e:
            # Covers both decryption failures (wrong key) and corrupt JSON.
            logger.error(f"Failed to decrypt credentials: {e}")
        return None

    async def delete_credentials(self, account_id: str, vault_path: str) -> bool:
        """
        Delete stored credentials.

        Args:
            account_id: The account ID
            vault_path: The storage path

        Returns:
            True if deleted successfully
        """
        if vault_path.startswith("vault:"):
            return await self._delete_from_vault(vault_path[6:])
        elif vault_path.startswith("file:"):
            return await self._delete_from_file(vault_path[5:])
        # Unrecognized reference format: nothing we can delete.
        return False

    async def _delete_from_vault(self, path: str) -> bool:
        """Delete credentials (all versions + metadata) from Vault."""
        if not self._vault_available:
            return False
        try:
            self._vault_client.secrets.kv.v2.delete_metadata_and_all_versions(
                path=path,
                mount_point="secret",
            )
            logger.info(f"Deleted credentials from Vault: {path}")
            return True
        except Exception as e:
            logger.error(f"Failed to delete credentials from Vault: {e}")
            return False

    async def _delete_from_file(self, path: str) -> bool:
        """Delete credentials file; a missing file counts as success."""
        try:
            os.remove(path)
            logger.info(f"Deleted credentials file: {path}")
            return True
        except FileNotFoundError:
            return True  # Already deleted
        except Exception as e:
            logger.error(f"Failed to delete credentials file: {e}")
            return False

    async def update_password(
        self,
        account_id: str,
        vault_path: str,
        new_password: str,
    ) -> bool:
        """
        Update the password for stored credentials.

        Args:
            account_id: The account ID
            vault_path: The storage path
            new_password: The new password

        Returns:
            True if updated successfully
        """
        # Get existing credentials
        creds = await self.get_credentials(account_id, vault_path)
        if not creds:
            return False
        # Store with new password.
        # NOTE(review): the path returned by store_credentials is discarded;
        # if the active backend differs from the one that wrote vault_path,
        # the caller's stored reference becomes stale — confirm acceptable.
        try:
            await self.store_credentials(
                account_id=account_id,
                email=creds.email,
                password=new_password,
                imap_host=creds.imap_host,
                imap_port=creds.imap_port,
                smtp_host=creds.smtp_host,
                smtp_port=creds.smtp_port,
            )
            return True
        except Exception as e:
            logger.error(f"Failed to update password: {e}")
            return False
# Module-level singleton holder for the credentials service.
_credentials_service: Optional[MailCredentialsService] = None


def get_credentials_service() -> MailCredentialsService:
    """Return the shared MailCredentialsService, creating it lazily."""
    global _credentials_service
    if _credentials_service is not None:
        return _credentials_service
    _credentials_service = MailCredentialsService()
    return _credentials_service

View File

@@ -0,0 +1,987 @@
"""
Unified Inbox Mail Database Service
PostgreSQL database operations for multi-account mail aggregation.
"""
import os
import json
import uuid
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta
# Database Configuration - from Vault or environment (test default for CI)
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://test:test@localhost:5432/test")
# Flag to check if using test defaults
# True only when a real DATABASE_URL was injected; False means CI/test default.
_DB_CONFIGURED = DATABASE_URL != "postgresql://test:test@localhost:5432/test"
# Connection pool (shared with metrics_db)
# Lazily created by get_pool(); stays None when asyncpg or the DB is unavailable.
_pool = None
async def get_pool():
    """Return the shared asyncpg pool, creating it lazily; None if unavailable."""
    global _pool
    if _pool is not None:
        return _pool
    # asyncpg is optional: degrade gracefully when it is missing or the
    # database cannot be reached, leaving _pool unset for a later retry.
    try:
        import asyncpg
    except ImportError:
        print("Warning: asyncpg not installed. Mail database disabled.")
        return None
    try:
        _pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)
    except Exception as e:
        print(f"Warning: Failed to connect to PostgreSQL: {e}")
        return None
    return _pool
async def init_mail_tables() -> bool:
    """Initialize mail tables in PostgreSQL.

    Idempotent: every CREATE uses IF NOT EXISTS, so re-running against an
    already-initialized database is a no-op.

    Returns:
        True when the DDL executed successfully, False when no pool is
        available or execution failed.
    """
    pool = await get_pool()
    if pool is None:
        return False
    # Entire unified-inbox schema, executed as one multi-statement script.
    create_tables_sql = """
    -- =============================================================================
    -- External Email Accounts
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS external_email_accounts (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        email VARCHAR(255) NOT NULL,
        display_name VARCHAR(255),
        account_type VARCHAR(50) DEFAULT 'personal',
        -- IMAP Settings (password stored in Vault)
        imap_host VARCHAR(255) NOT NULL,
        imap_port INTEGER DEFAULT 993,
        imap_ssl BOOLEAN DEFAULT TRUE,
        -- SMTP Settings
        smtp_host VARCHAR(255) NOT NULL,
        smtp_port INTEGER DEFAULT 465,
        smtp_ssl BOOLEAN DEFAULT TRUE,
        -- Vault path for credentials
        vault_path VARCHAR(500),
        -- Status tracking
        status VARCHAR(20) DEFAULT 'pending',
        last_sync TIMESTAMP,
        sync_error TEXT,
        email_count INTEGER DEFAULT 0,
        unread_count INTEGER DEFAULT 0,
        -- Timestamps
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW(),
        -- Constraints
        UNIQUE(user_id, email)
    );
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_user ON external_email_accounts(user_id);
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_tenant ON external_email_accounts(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_status ON external_email_accounts(status);
    -- =============================================================================
    -- Aggregated Emails
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS aggregated_emails (
        id VARCHAR(36) PRIMARY KEY,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE CASCADE,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        -- Email identification
        message_id VARCHAR(500) NOT NULL,
        folder VARCHAR(100) DEFAULT 'INBOX',
        -- Email content
        subject TEXT,
        sender_email VARCHAR(255),
        sender_name VARCHAR(255),
        recipients JSONB DEFAULT '[]',
        cc JSONB DEFAULT '[]',
        body_preview TEXT,
        body_text TEXT,
        body_html TEXT,
        has_attachments BOOLEAN DEFAULT FALSE,
        attachments JSONB DEFAULT '[]',
        headers JSONB DEFAULT '{}',
        -- Status flags
        is_read BOOLEAN DEFAULT FALSE,
        is_starred BOOLEAN DEFAULT FALSE,
        is_deleted BOOLEAN DEFAULT FALSE,
        -- Dates
        date_sent TIMESTAMP,
        date_received TIMESTAMP,
        -- AI enrichment
        category VARCHAR(50),
        sender_type VARCHAR(50),
        sender_authority_name VARCHAR(255),
        detected_deadlines JSONB DEFAULT '[]',
        suggested_priority VARCHAR(20),
        ai_summary TEXT,
        ai_analyzed_at TIMESTAMP,
        created_at TIMESTAMP DEFAULT NOW(),
        -- Prevent duplicate imports
        UNIQUE(account_id, message_id)
    );
    CREATE INDEX IF NOT EXISTS idx_emails_account ON aggregated_emails(account_id);
    CREATE INDEX IF NOT EXISTS idx_emails_user ON aggregated_emails(user_id);
    CREATE INDEX IF NOT EXISTS idx_emails_tenant ON aggregated_emails(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_emails_date ON aggregated_emails(date_received DESC);
    CREATE INDEX IF NOT EXISTS idx_emails_category ON aggregated_emails(category);
    CREATE INDEX IF NOT EXISTS idx_emails_unread ON aggregated_emails(is_read) WHERE is_read = FALSE;
    CREATE INDEX IF NOT EXISTS idx_emails_starred ON aggregated_emails(is_starred) WHERE is_starred = TRUE;
    CREATE INDEX IF NOT EXISTS idx_emails_sender ON aggregated_emails(sender_email);
    -- =============================================================================
    -- Inbox Tasks (Arbeitsvorrat)
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS inbox_tasks (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        email_id VARCHAR(36) REFERENCES aggregated_emails(id) ON DELETE SET NULL,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE SET NULL,
        -- Task content
        title VARCHAR(500) NOT NULL,
        description TEXT,
        priority VARCHAR(20) DEFAULT 'medium',
        status VARCHAR(20) DEFAULT 'pending',
        deadline TIMESTAMP,
        -- Source information
        source_email_subject TEXT,
        source_sender VARCHAR(255),
        source_sender_type VARCHAR(50),
        -- AI extraction info
        ai_extracted BOOLEAN DEFAULT FALSE,
        confidence_score FLOAT,
        -- Completion tracking
        completed_at TIMESTAMP,
        reminder_at TIMESTAMP,
        -- Timestamps
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_tasks_user ON inbox_tasks(user_id);
    CREATE INDEX IF NOT EXISTS idx_tasks_tenant ON inbox_tasks(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_tasks_status ON inbox_tasks(status);
    CREATE INDEX IF NOT EXISTS idx_tasks_deadline ON inbox_tasks(deadline) WHERE deadline IS NOT NULL;
    CREATE INDEX IF NOT EXISTS idx_tasks_priority ON inbox_tasks(priority);
    CREATE INDEX IF NOT EXISTS idx_tasks_email ON inbox_tasks(email_id) WHERE email_id IS NOT NULL;
    -- =============================================================================
    -- Email Templates
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS email_templates (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36),  -- NULL for system templates
        tenant_id VARCHAR(36),
        name VARCHAR(255) NOT NULL,
        category VARCHAR(100),
        subject_template TEXT,
        body_template TEXT,
        variables JSONB DEFAULT '[]',
        is_system BOOLEAN DEFAULT FALSE,
        usage_count INTEGER DEFAULT 0,
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_templates_user ON email_templates(user_id);
    CREATE INDEX IF NOT EXISTS idx_templates_tenant ON email_templates(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_templates_system ON email_templates(is_system);
    -- =============================================================================
    -- Mail Audit Log
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS mail_audit_log (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36),
        action VARCHAR(100) NOT NULL,
        entity_type VARCHAR(50),  -- account, email, task
        entity_id VARCHAR(36),
        details JSONB,
        ip_address VARCHAR(45),
        user_agent TEXT,
        created_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_mail_audit_user ON mail_audit_log(user_id);
    CREATE INDEX IF NOT EXISTS idx_mail_audit_created ON mail_audit_log(created_at DESC);
    CREATE INDEX IF NOT EXISTS idx_mail_audit_action ON mail_audit_log(action);
    -- =============================================================================
    -- Sync Status Tracking
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS mail_sync_status (
        id VARCHAR(36) PRIMARY KEY,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE CASCADE,
        folder VARCHAR(100),
        last_uid INTEGER DEFAULT 0,
        last_sync TIMESTAMP,
        sync_errors INTEGER DEFAULT 0,
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW(),
        UNIQUE(account_id, folder)
    );
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(create_tables_sql)
        print("Mail tables initialized successfully")
        return True
    except Exception as e:
        print(f"Failed to initialize mail tables: {e}")
        return False
# =============================================================================
# Email Account Operations
# =============================================================================
async def create_email_account(
    user_id: str,
    tenant_id: str,
    email: str,
    display_name: str,
    account_type: str,
    imap_host: str,
    imap_port: int,
    imap_ssl: bool,
    smtp_host: str,
    smtp_port: int,
    smtp_ssl: bool,
    vault_path: str,
) -> Optional[str]:
    """Create a new email account. Returns the account ID.

    The password itself is never stored here; only the Vault path is
    persisted alongside the connection settings.
    """
    pool = await get_pool()
    if pool is None:
        return None
    new_account_id = str(uuid.uuid4())
    row_values = (
        new_account_id, user_id, tenant_id, email, display_name, account_type,
        imap_host, imap_port, imap_ssl, smtp_host, smtp_port, smtp_ssl, vault_path,
    )
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO external_email_accounts
                (id, user_id, tenant_id, email, display_name, account_type,
                imap_host, imap_port, imap_ssl, smtp_host, smtp_port, smtp_ssl, vault_path)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
                """,
                *row_values,
            )
    except Exception as e:
        print(f"Failed to create email account: {e}")
        return None
    return new_account_id
async def get_email_accounts(
    user_id: str,
    tenant_id: Optional[str] = None,
) -> List[Dict]:
    """Get all email accounts for a user.

    When tenant_id is given, results are additionally scoped to that tenant.
    Returns [] when the database is unavailable or the query fails.
    """
    pool = await get_pool()
    if pool is None:
        return []
    # Pick the query/argument pair up front; a single fetch call follows.
    if tenant_id:
        query = """
            SELECT * FROM external_email_accounts
            WHERE user_id = $1 AND tenant_id = $2
            ORDER BY created_at
        """
        args = (user_id, tenant_id)
    else:
        query = """
            SELECT * FROM external_email_accounts
            WHERE user_id = $1
            ORDER BY created_at
        """
        args = (user_id,)
    try:
        async with pool.acquire() as conn:
            records = await conn.fetch(query, *args)
        return [dict(rec) for rec in records]
    except Exception as e:
        print(f"Failed to get email accounts: {e}")
        return []
async def get_email_account(account_id: str, user_id: str) -> Optional[Dict]:
    """Get a single email account, scoped to its owner; None if not found."""
    pool = await get_pool()
    if pool is None:
        return None
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(
                """
                SELECT * FROM external_email_accounts
                WHERE id = $1 AND user_id = $2
                """,
                account_id, user_id,
            )
    except Exception as e:
        print(f"Failed to get email account: {e}")
        return None
    return dict(record) if record else None
async def update_account_status(
    account_id: str,
    status: str,
    sync_error: Optional[str] = None,
    email_count: Optional[int] = None,
    unread_count: Optional[int] = None,
) -> bool:
    """Update account sync status.

    Counts are only overwritten when provided (COALESCE keeps the stored
    value for None); last_sync/updated_at are always bumped to NOW().
    """
    pool = await get_pool()
    if pool is None:
        return False
    update_sql = """
        UPDATE external_email_accounts SET
            status = $2,
            sync_error = $3,
            email_count = COALESCE($4, email_count),
            unread_count = COALESCE($5, unread_count),
            last_sync = NOW(),
            updated_at = NOW()
        WHERE id = $1
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                update_sql,
                account_id, status, sync_error, email_count, unread_count,
            )
        return True
    except Exception as e:
        print(f"Failed to update account status: {e}")
        return False
async def delete_email_account(account_id: str, user_id: str) -> bool:
    """Delete an email account (cascades to emails).

    Args:
        account_id: ID of the account to remove.
        user_id: Owner; the row is deleted only if it belongs to this user.

    Returns:
        True only when a row was actually deleted. asyncpg's execute()
        returns a command tag like "DELETE <count>"; the previous check
        ("DELETE" in result) was also True for "DELETE 0", so deletions
        that matched nothing (wrong ID / wrong owner) were reported as
        successes.
    """
    pool = await get_pool()
    if pool is None:
        return False
    try:
        async with pool.acquire() as conn:
            result = await conn.execute(
                """
                DELETE FROM external_email_accounts
                WHERE id = $1 AND user_id = $2
                """,
                account_id, user_id
            )
            # Parse the command tag and require a non-zero row count.
            parts = result.split()
            return bool(parts) and parts[0] == "DELETE" and parts[-1] != "0"
    except Exception as e:
        print(f"Failed to delete email account: {e}")
        return False
# =============================================================================
# Aggregated Email Operations
# =============================================================================
async def upsert_email(
    account_id: str,
    user_id: str,
    tenant_id: str,
    message_id: str,
    subject: str,
    sender_email: str,
    sender_name: Optional[str],
    recipients: List[str],
    cc: List[str],
    body_preview: Optional[str],
    body_text: Optional[str],
    body_html: Optional[str],
    has_attachments: bool,
    attachments: List[Dict],
    headers: Dict,
    folder: str,
    date_sent: datetime,
    date_received: datetime,
) -> Optional[str]:
    """Insert or update an email. Returns the email ID (existing ID on re-sync).

    Duplicate detection is via the UNIQUE(account_id, message_id) constraint;
    on conflict only subject and folder are refreshed.

    FIX: the previous conflict clause also set `is_read = EXCLUDED.is_read`,
    but `is_read` is never part of the INSERT column list, so EXCLUDED
    carried the column default (FALSE) and every re-sync silently marked
    already-read emails as unread. The stored read flag is now preserved.
    """
    pool = await get_pool()
    if pool is None:
        return None
    email_id = str(uuid.uuid4())
    try:
        async with pool.acquire() as conn:
            # Insert; on conflict refresh mutable metadata and return the
            # row's (possibly pre-existing) ID.
            row = await conn.fetchrow(
                """
                INSERT INTO aggregated_emails
                (id, account_id, user_id, tenant_id, message_id, subject,
                sender_email, sender_name, recipients, cc, body_preview,
                body_text, body_html, has_attachments, attachments, headers,
                folder, date_sent, date_received)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
                ON CONFLICT (account_id, message_id) DO UPDATE SET
                    subject = EXCLUDED.subject,
                    folder = EXCLUDED.folder
                RETURNING id
                """,
                email_id, account_id, user_id, tenant_id, message_id, subject,
                sender_email, sender_name, json.dumps(recipients), json.dumps(cc),
                body_preview, body_text, body_html, has_attachments,
                json.dumps(attachments), json.dumps(headers), folder,
                date_sent, date_received
            )
            return row['id'] if row else None
    except Exception as e:
        print(f"Failed to upsert email: {e}")
        return None
async def get_unified_inbox(
    user_id: str,
    account_ids: Optional[List[str]] = None,
    categories: Optional[List[str]] = None,
    is_read: Optional[bool] = None,
    is_starred: Optional[bool] = None,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict]:
    """Get unified inbox with filtering.

    Builds a dynamic WHERE clause from the optional filters; results are
    joined with the owning account and ordered newest-first.
    """
    pool = await get_pool()
    if pool is None:
        return []
    # WHERE fragments and bind values grow in lockstep; the parameter
    # number for a new filter is always len(values) + 1.
    conditions = ["user_id = $1", "is_deleted = FALSE"]
    values: List[Any] = [user_id]

    def _add(fragment: str, value: Any) -> None:
        # fragment contains one "{}" placeholder for the parameter number.
        conditions.append(fragment.format(len(values) + 1))
        values.append(value)

    if account_ids:
        _add("account_id = ANY(${})", account_ids)
    if categories:
        _add("category = ANY(${})", categories)
    if is_read is not None:
        _add("is_read = ${}", is_read)
    if is_starred is not None:
        _add("is_starred = ${}", is_starred)

    where_clause = " AND ".join(conditions)
    limit_pos = len(values) + 1
    values.extend([limit, offset])
    query = f"""
        SELECT e.*, a.email as account_email, a.display_name as account_name
        FROM aggregated_emails e
        JOIN external_email_accounts a ON e.account_id = a.id
        WHERE {where_clause}
        ORDER BY e.date_received DESC
        LIMIT ${limit_pos} OFFSET ${limit_pos + 1}
    """
    try:
        async with pool.acquire() as conn:
            rows = await conn.fetch(query, *values)
        return [dict(r) for r in rows]
    except Exception as e:
        print(f"Failed to get unified inbox: {e}")
        return []
async def get_email(email_id: str, user_id: str) -> Optional[Dict]:
    """Get a single email by ID (with account info joined in); None if absent."""
    pool = await get_pool()
    if pool is None:
        return None
    query = """
        SELECT e.*, a.email as account_email, a.display_name as account_name
        FROM aggregated_emails e
        JOIN external_email_accounts a ON e.account_id = a.id
        WHERE e.id = $1 AND e.user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(query, email_id, user_id)
    except Exception as e:
        print(f"Failed to get email: {e}")
        return None
    return dict(record) if record else None
async def update_email_ai_analysis(
    email_id: str,
    category: str,
    sender_type: str,
    sender_authority_name: Optional[str],
    detected_deadlines: List[Dict],
    suggested_priority: str,
    ai_summary: Optional[str],
) -> bool:
    """Update email with AI analysis results; stamps ai_analyzed_at = NOW()."""
    pool = await get_pool()
    if pool is None:
        return False
    update_sql = """
        UPDATE aggregated_emails SET
            category = $2,
            sender_type = $3,
            sender_authority_name = $4,
            detected_deadlines = $5,
            suggested_priority = $6,
            ai_summary = $7,
            ai_analyzed_at = NOW()
        WHERE id = $1
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                update_sql,
                email_id, category, sender_type, sender_authority_name,
                json.dumps(detected_deadlines), suggested_priority, ai_summary,
            )
        return True
    except Exception as e:
        print(f"Failed to update email AI analysis: {e}")
        return False
async def mark_email_read(email_id: str, user_id: str, is_read: bool = True) -> bool:
    """Mark email as read/unread (scoped to the owning user)."""
    pool = await get_pool()
    if pool is None:
        return False
    flag_sql = """
        UPDATE aggregated_emails SET is_read = $3
        WHERE id = $1 AND user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(flag_sql, email_id, user_id, is_read)
        return True
    except Exception as e:
        print(f"Failed to mark email read: {e}")
        return False
async def mark_email_starred(email_id: str, user_id: str, is_starred: bool = True) -> bool:
    """Mark email as starred/unstarred (scoped to the owning user)."""
    pool = await get_pool()
    if pool is None:
        return False
    flag_sql = """
        UPDATE aggregated_emails SET is_starred = $3
        WHERE id = $1 AND user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(flag_sql, email_id, user_id, is_starred)
        return True
    except Exception as e:
        print(f"Failed to mark email starred: {e}")
        return False
# =============================================================================
# Inbox Task Operations
# =============================================================================
async def create_task(
    user_id: str,
    tenant_id: str,
    title: str,
    description: Optional[str] = None,
    priority: str = "medium",
    deadline: Optional[datetime] = None,
    email_id: Optional[str] = None,
    account_id: Optional[str] = None,
    source_email_subject: Optional[str] = None,
    source_sender: Optional[str] = None,
    source_sender_type: Optional[str] = None,
    ai_extracted: bool = False,
    confidence_score: Optional[float] = None,
) -> Optional[str]:
    """Create a new inbox task. Returns the generated task ID, or None on failure."""
    pool = await get_pool()
    if pool is None:
        return None
    new_task_id = str(uuid.uuid4())
    row_values = (
        new_task_id, user_id, tenant_id, title, description, priority, deadline,
        email_id, account_id, source_email_subject, source_sender,
        source_sender_type, ai_extracted, confidence_score,
    )
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO inbox_tasks
                (id, user_id, tenant_id, title, description, priority, deadline,
                email_id, account_id, source_email_subject, source_sender,
                source_sender_type, ai_extracted, confidence_score)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
                """,
                *row_values,
            )
    except Exception as e:
        print(f"Failed to create task: {e}")
        return None
    return new_task_id
async def get_tasks(
    user_id: str,
    status: Optional[str] = None,
    priority: Optional[str] = None,
    include_completed: bool = False,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict]:
    """Get tasks for a user.

    Completed tasks are excluded unless include_completed is True. Results
    are ordered by priority (urgent first), then nearest deadline, then
    most recently created.
    """
    pool = await get_pool()
    if pool is None:
        return []
    conditions = ["user_id = $1"]
    values: List[Any] = [user_id]
    if not include_completed:
        conditions.append("status != 'completed'")
    if status:
        values.append(status)
        conditions.append(f"status = ${len(values)}")
    if priority:
        values.append(priority)
        conditions.append(f"priority = ${len(values)}")
    limit_pos = len(values) + 1
    values.extend([limit, offset])
    query = f"""
        SELECT * FROM inbox_tasks
        WHERE {" AND ".join(conditions)}
        ORDER BY
            CASE priority
                WHEN 'urgent' THEN 1
                WHEN 'high' THEN 2
                WHEN 'medium' THEN 3
                WHEN 'low' THEN 4
            END,
            deadline ASC NULLS LAST,
            created_at DESC
        LIMIT ${limit_pos} OFFSET ${limit_pos + 1}
    """
    try:
        async with pool.acquire() as conn:
            rows = await conn.fetch(query, *values)
        return [dict(r) for r in rows]
    except Exception as e:
        print(f"Failed to get tasks: {e}")
        return []
async def get_task(task_id: str, user_id: str) -> Optional[Dict]:
    """Get a single task owned by the user; None if not found or on error."""
    pool = await get_pool()
    if pool is None:
        return None
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(
                "SELECT * FROM inbox_tasks WHERE id = $1 AND user_id = $2",
                task_id, user_id,
            )
    except Exception as e:
        print(f"Failed to get task: {e}")
        return None
    return dict(record) if record else None
async def update_task(
    task_id: str,
    user_id: str,
    title: Optional[str] = None,
    description: Optional[str] = None,
    priority: Optional[str] = None,
    status: Optional[str] = None,
    deadline: Optional[datetime] = None,
) -> bool:
    """Update a task.

    Only the fields passed as non-None are written; updated_at is always
    refreshed, and moving status to "completed" also stamps completed_at.
    Note: passing None cannot clear a field (e.g. remove a deadline).
    """
    pool = await get_pool()
    if pool is None:
        return False
    # Keep SET fragments and bind values in lockstep; $1/$2 are reserved
    # for the WHERE clause (task_id, user_id).
    set_fragments = ["updated_at = NOW()"]
    bind_values: List[Any] = [task_id, user_id]
    for column, value in (
        ("title", title),
        ("description", description),
        ("priority", priority),
        ("status", status),
        ("deadline", deadline),
    ):
        if value is None:
            continue
        bind_values.append(value)
        set_fragments.append(f"{column} = ${len(bind_values)}")
        if column == "status" and value == "completed":
            set_fragments.append("completed_at = NOW()")
    set_clause = ", ".join(set_fragments)
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                f"UPDATE inbox_tasks SET {set_clause} WHERE id = $1 AND user_id = $2",
                *bind_values,
            )
        return True
    except Exception as e:
        print(f"Failed to update task: {e}")
        return False
async def get_task_dashboard_stats(user_id: str) -> Dict:
    """Get dashboard statistics for tasks.

    Returns counts by status, overdue/due-today/due-this-week buckets, and
    breakdowns by priority and sender type. Returns {} when the database
    is unavailable or the queries fail.

    NOTE(review): uses naive datetime.now() for deadline comparisons —
    assumes inbox_tasks.deadline is stored in the server's local zone;
    confirm against the sync/AI writers.
    """
    pool = await get_pool()
    if pool is None:
        return {}
    try:
        async with pool.acquire() as conn:
            # Reference points for the overdue / due-today / due-this-week buckets.
            now = datetime.now()
            today_end = now.replace(hour=23, minute=59, second=59)
            week_end = now + timedelta(days=7)
            stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_tasks,
                    COUNT(*) FILTER (WHERE status = 'pending') as pending_tasks,
                    COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress_tasks,
                    COUNT(*) FILTER (WHERE status = 'completed') as completed_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline < $2) as overdue_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline <= $3) as due_today,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline <= $4) as due_this_week
                FROM inbox_tasks
                WHERE user_id = $1
                """,
                user_id, now, today_end, week_end
            )
            # Open-task breakdowns (completed tasks excluded).
            by_priority = await conn.fetch(
                """
                SELECT priority, COUNT(*) as count
                FROM inbox_tasks
                WHERE user_id = $1 AND status != 'completed'
                GROUP BY priority
                """,
                user_id
            )
            by_sender = await conn.fetch(
                """
                SELECT source_sender_type, COUNT(*) as count
                FROM inbox_tasks
                WHERE user_id = $1 AND status != 'completed' AND source_sender_type IS NOT NULL
                GROUP BY source_sender_type
                """,
                user_id
            )
            return {
                "total_tasks": stats['total_tasks'] or 0,
                "pending_tasks": stats['pending_tasks'] or 0,
                "in_progress_tasks": stats['in_progress_tasks'] or 0,
                "completed_tasks": stats['completed_tasks'] or 0,
                "overdue_tasks": stats['overdue_tasks'] or 0,
                "due_today": stats['due_today'] or 0,
                "due_this_week": stats['due_this_week'] or 0,
                "by_priority": {r['priority']: r['count'] for r in by_priority},
                "by_sender_type": {r['source_sender_type']: r['count'] for r in by_sender},
            }
    except Exception as e:
        print(f"Failed to get task stats: {e}")
        return {}
# =============================================================================
# Statistics & Audit
# =============================================================================
async def get_mail_stats(user_id: str) -> Dict:
    """Get overall mail statistics for a user.

    Aggregates account health, email counts (total/unread/today) and task
    counts into a single dict, plus a per-account breakdown. Returns {}
    when the database is unavailable or the queries fail.
    """
    pool = await get_pool()
    if pool is None:
        return {}
    try:
        async with pool.acquire() as conn:
            # Midnight today (naive local time) — lower bound for the
            # "today" counters below.
            today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            # Account stats
            accounts = await conn.fetch(
                """
                SELECT id, email, display_name, status, email_count, unread_count, last_sync
                FROM external_email_accounts
                WHERE user_id = $1
                """,
                user_id
            )
            # Email counts
            email_stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_emails,
                    COUNT(*) FILTER (WHERE is_read = FALSE) as unread_emails,
                    COUNT(*) FILTER (WHERE date_received >= $2) as emails_today,
                    COUNT(*) FILTER (WHERE ai_analyzed_at >= $2) as ai_analyses_today
                FROM aggregated_emails
                WHERE user_id = $1
                """,
                user_id, today
            )
            # Task counts
            task_stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_tasks,
                    COUNT(*) FILTER (WHERE status = 'pending') as pending_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline < NOW()) as overdue_tasks
                FROM inbox_tasks
                WHERE user_id = $1
                """,
                user_id
            )
            return {
                "total_accounts": len(accounts),
                "active_accounts": sum(1 for a in accounts if a['status'] == 'active'),
                "error_accounts": sum(1 for a in accounts if a['status'] == 'error'),
                "total_emails": email_stats['total_emails'] or 0,
                "unread_emails": email_stats['unread_emails'] or 0,
                "total_tasks": task_stats['total_tasks'] or 0,
                "pending_tasks": task_stats['pending_tasks'] or 0,
                "overdue_tasks": task_stats['overdue_tasks'] or 0,
                "emails_today": email_stats['emails_today'] or 0,
                "ai_analyses_today": email_stats['ai_analyses_today'] or 0,
                "per_account": [
                    {
                        "id": a['id'],
                        "email": a['email'],
                        "display_name": a['display_name'],
                        "status": a['status'],
                        "email_count": a['email_count'],
                        "unread_count": a['unread_count'],
                        # last_sync may be NULL for never-synced accounts.
                        "last_sync": a['last_sync'].isoformat() if a['last_sync'] else None,
                    }
                    for a in accounts
                ],
            }
    except Exception as e:
        print(f"Failed to get mail stats: {e}")
        return {}
async def log_mail_audit(
    user_id: str,
    action: str,
    entity_type: Optional[str] = None,
    entity_id: Optional[str] = None,
    details: Optional[Dict] = None,
    tenant_id: Optional[str] = None,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
) -> bool:
    """Log a mail action for audit trail. Best-effort: returns False on failure."""
    pool = await get_pool()
    if pool is None:
        return False
    details_json = json.dumps(details) if details else None
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO mail_audit_log
                (id, user_id, tenant_id, action, entity_type, entity_id, details, ip_address, user_agent)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
                """,
                str(uuid.uuid4()), user_id, tenant_id, action, entity_type, entity_id,
                details_json, ip_address, user_agent,
            )
        return True
    except Exception as e:
        print(f"Failed to log mail audit: {e}")
        return False

View File

@@ -0,0 +1,455 @@
"""
Unified Inbox Mail Models
Pydantic models for API requests/responses and internal data structures.
Database schema is defined in mail_db.py.
"""
from datetime import datetime
from enum import Enum
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field, EmailStr
import uuid
# =============================================================================
# Enums
# =============================================================================
class AccountStatus(str, Enum):
    """Status of an email account connection.

    New accounts default to PENDING (see the model default and the DB
    column default in mail_db).
    """
    ACTIVE = "active"
    INACTIVE = "inactive"
    ERROR = "error"
    PENDING = "pending"
class TaskStatus(str, Enum):
    """Status of an inbox task (Arbeitsvorrat item)."""
    PENDING = "pending"        # default for new tasks
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    ARCHIVED = "archived"
class TaskPriority(str, Enum):
    """Priority level for tasks; MEDIUM is the default on TaskBase."""
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
class EmailCategory(str, Enum):
    """AI-detected email categories.

    German school-administration domain; values are lowercase German terms.
    """
    DIENSTLICH = "dienstlich"        # Official government/authority
    PERSONAL = "personal"            # Staff/HR matters
    FINANZEN = "finanzen"            # Finance/budget
    ELTERN = "eltern"                # Parent communication
    SCHUELER = "schueler"            # Student matters
    KOLLEGIUM = "kollegium"          # Teacher colleagues
    FORTBILDUNG = "fortbildung"      # Professional development
    VERANSTALTUNG = "veranstaltung"  # Events
    SICHERHEIT = "sicherheit"        # Safety/security
    TECHNIK = "technik"              # IT/technical
    NEWSLETTER = "newsletter"        # Newsletters
    WERBUNG = "werbung"              # Advertising/spam
    SONSTIGES = "sonstiges"          # Other
class SenderType(str, Enum):
    """Type of sender for classification.

    Mostly Lower-Saxony (Niedersachsen) school authorities; used both by
    domain matching (KNOWN_AUTHORITIES_NI) and AI classification.
    """
    KULTUSMINISTERIUM = "kultusministerium"      # State ministry of education
    LANDESSCHULBEHOERDE = "landesschulbehoerde"  # State school authority
    RLSB = "rlsb"  # Regionales Landesamt für Schule und Bildung
    SCHULAMT = "schulamt"
    NIBIS = "nibis"
    SCHULTRAEGER = "schultraeger"
    ELTERNVERTRETER = "elternvertreter"
    GEWERKSCHAFT = "gewerkschaft"
    FORTBILDUNGSINSTITUT = "fortbildungsinstitut"
    PRIVATPERSON = "privatperson"
    UNTERNEHMEN = "unternehmen"
    UNBEKANNT = "unbekannt"  # fallback when no classification applies
# =============================================================================
# Known Authority Domains (Niedersachsen)
# =============================================================================
# Maps a sender-address domain suffix (including the leading "@") to its
# SenderType and human-readable authority name. Used for deterministic
# sender classification before falling back to AI.
KNOWN_AUTHORITIES_NI = {
    "@mk.niedersachsen.de": {"type": SenderType.KULTUSMINISTERIUM, "name": "Kultusministerium Niedersachsen"},
    "@rlsb.de": {"type": SenderType.RLSB, "name": "Regionales Landesamt für Schule und Bildung"},
    "@rlsb-bs.niedersachsen.de": {"type": SenderType.RLSB, "name": "RLSB Braunschweig"},
    "@rlsb-h.niedersachsen.de": {"type": SenderType.RLSB, "name": "RLSB Hannover"},
    "@rlsb-lg.niedersachsen.de": {"type": SenderType.RLSB, "name": "RLSB Lüneburg"},
    "@rlsb-os.niedersachsen.de": {"type": SenderType.RLSB, "name": "RLSB Osnabrück"},
    "@landesschulbehoerde-nds.de": {"type": SenderType.LANDESSCHULBEHOERDE, "name": "Landesschulbehörde"},
    "@nibis.de": {"type": SenderType.NIBIS, "name": "NiBiS"},
    "@schule.niedersachsen.de": {"type": SenderType.LANDESSCHULBEHOERDE, "name": "Schulnetzwerk NI"},
    "@nlq.nibis.de": {"type": SenderType.FORTBILDUNGSINSTITUT, "name": "NLQ"},
    "@gew-nds.de": {"type": SenderType.GEWERKSCHAFT, "name": "GEW Niedersachsen"},
    "@vbe-nds.de": {"type": SenderType.GEWERKSCHAFT, "name": "VBE Niedersachsen"},
}
# =============================================================================
# Email Account Models
# =============================================================================
class EmailAccountBase(BaseModel):
    """Base model for email account (shared IMAP/SMTP connection settings)."""
    email: EmailStr = Field(..., description="Email address")
    display_name: str = Field(..., description="Display name for the account")
    # NOTE(review): the description lists "personal" twice — one entry is
    # likely meant to be a different type (e.g. "privat"); confirm the
    # intended set of account types.
    account_type: str = Field("personal", description="Type: personal, schulleitung, personal, verwaltung")
    imap_host: str = Field(..., description="IMAP server hostname")
    imap_port: int = Field(993, description="IMAP port (default: 993 for SSL)")
    imap_ssl: bool = Field(True, description="Use SSL for IMAP")
    smtp_host: str = Field(..., description="SMTP server hostname")
    smtp_port: int = Field(465, description="SMTP port (default: 465 for SSL)")
    smtp_ssl: bool = Field(True, description="Use SSL for SMTP")
class EmailAccountCreate(EmailAccountBase):
    """Model for creating a new email account.

    Extends the base settings with the one-time password; the password is
    handed to the credentials service and never persisted in the DB row.
    """
    password: str = Field(..., description="Password (will be stored in Vault)")
class EmailAccountUpdate(BaseModel):
    """Model for updating an email account; all fields optional (partial update)."""
    display_name: Optional[str] = None
    account_type: Optional[str] = None
    imap_host: Optional[str] = None
    imap_port: Optional[int] = None
    smtp_host: Optional[str] = None
    smtp_port: Optional[int] = None
    password: Optional[str] = None  # Only if changing
class EmailAccount(EmailAccountBase):
    """Full email account model (without password).

    Mirrors the external_email_accounts table; read via from_attributes.
    """
    id: str
    user_id: str
    tenant_id: str
    status: AccountStatus = AccountStatus.PENDING
    last_sync: Optional[datetime] = None   # None until the first sync
    sync_error: Optional[str] = None       # last sync failure message, if any
    email_count: int = 0
    unread_count: int = 0
    created_at: datetime
    updated_at: datetime
    class Config:
        from_attributes = True
class AccountTestResult(BaseModel):
    """Result of testing email account connection (IMAP and SMTP probed separately)."""
    success: bool
    imap_connected: bool = False
    smtp_connected: bool = False
    error_message: Optional[str] = None
    # Pydantic copies mutable defaults per instance, so the [] default is safe.
    folders_found: List[str] = []
# =============================================================================
# Aggregated Email Models
# =============================================================================
class AggregatedEmailBase(BaseModel):
    """Base model for an aggregated email (lightweight list-view fields)."""
    subject: str
    sender_email: str
    sender_name: Optional[str] = None
    recipients: List[str] = []
    cc: List[str] = []
    body_preview: Optional[str] = None
    has_attachments: bool = False
class AggregatedEmail(AggregatedEmailBase):
    """Full aggregated email model, mirroring the aggregated_emails table."""
    id: str
    account_id: str
    message_id: str  # Original IMAP message ID
    folder: str = "INBOX"
    is_read: bool = False
    is_starred: bool = False
    is_deleted: bool = False
    body_text: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[Dict[str, Any]] = []
    headers: Dict[str, str] = {}
    date_sent: datetime
    date_received: datetime
    # AI-enriched fields — populated after analysis, all optional/empty before.
    category: Optional[EmailCategory] = None
    sender_type: Optional[SenderType] = None
    sender_authority_name: Optional[str] = None
    detected_deadlines: List[Dict[str, Any]] = []
    suggested_priority: Optional[TaskPriority] = None
    ai_summary: Optional[str] = None
    created_at: datetime
    class Config:
        from_attributes = True
class EmailSearchParams(BaseModel):
    """Parameters for searching emails; all filters optional, combined with AND."""
    query: Optional[str] = None
    account_ids: Optional[List[str]] = None
    categories: Optional[List[EmailCategory]] = None
    is_read: Optional[bool] = None
    is_starred: Optional[bool] = None
    has_deadline: Optional[bool] = None
    date_from: Optional[datetime] = None
    date_to: Optional[datetime] = None
    limit: int = Field(50, ge=1, le=200)  # page size, capped at 200
    offset: int = Field(0, ge=0)
# =============================================================================
# Inbox Task Models (Arbeitsvorrat)
# =============================================================================
class TaskBase(BaseModel):
    """Base model for inbox task (Arbeitsvorrat item)."""
    title: str = Field(..., description="Task title")
    description: Optional[str] = None
    priority: TaskPriority = TaskPriority.MEDIUM
    deadline: Optional[datetime] = None
class TaskCreate(TaskBase):
    """Payload for creating a task manually (optionally linked to an email)."""
    email_id: Optional[str] = None  # Link to source email
class TaskUpdate(BaseModel):
    """Partial update payload for a task; None means "leave unchanged"."""
    title: Optional[str] = None
    description: Optional[str] = None
    priority: Optional[TaskPriority] = None
    status: Optional[TaskStatus] = None
    deadline: Optional[datetime] = None
    completed_at: Optional[datetime] = None  # completion timestamp
class InboxTask(TaskBase):
    """Full inbox task model as persisted and returned by the API."""
    id: str
    user_id: str
    tenant_id: str
    email_id: Optional[str] = None  # source email, when task was derived from one
    account_id: Optional[str] = None
    status: TaskStatus = TaskStatus.PENDING
    # Denormalized source information for display without an extra email fetch.
    source_email_subject: Optional[str] = None
    source_sender: Optional[str] = None
    source_sender_type: Optional[SenderType] = None
    # AI-extracted information
    ai_extracted: bool = False  # True when auto-created by the analysis pipeline
    confidence_score: Optional[float] = None  # extraction confidence, 0..1
    completed_at: Optional[datetime] = None
    reminder_at: Optional[datetime] = None  # when a reminder should fire
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction from ORM/DB row objects via attribute access.
        from_attributes = True
class TaskDashboardStats(BaseModel):
    """Aggregate task counters shown on the Arbeitsvorrat dashboard."""
    total_tasks: int = 0
    pending_tasks: int = 0
    in_progress_tasks: int = 0
    completed_tasks: int = 0
    overdue_tasks: int = 0  # deadline in the past and not completed
    due_today: int = 0
    due_this_week: int = 0
    by_priority: Dict[str, int] = {}  # priority value -> count
    by_sender_type: Dict[str, int] = {}  # sender type value -> count
# =============================================================================
# AI Analysis Models
# =============================================================================
class SenderClassification(BaseModel):
    """Result of classifying an email sender (domain lookup and/or AI)."""
    sender_type: SenderType
    authority_name: Optional[str] = None  # resolved authority name, if recognized
    confidence: float = Field(..., ge=0, le=1)
    domain_matched: bool = False  # True when matched via the known-domain table
    ai_classified: bool = False  # True when an AI model made the classification
class DeadlineExtraction(BaseModel):
    """A single deadline extracted from an email body."""
    deadline_date: datetime
    description: str  # human-readable description of what is due
    confidence: float = Field(..., ge=0, le=1)
    source_text: str  # Original text containing the deadline
    is_firm: bool = True  # True for "bis zum" (binding), False for "etwa" (approximate)
class EmailAnalysisResult(BaseModel):
    """Complete AI analysis result for one email."""
    email_id: str
    category: EmailCategory
    category_confidence: float  # confidence of the category classification
    sender_classification: SenderClassification
    deadlines: List[DeadlineExtraction] = []
    suggested_priority: TaskPriority
    summary: Optional[str] = None  # short AI-generated summary
    suggested_actions: List[str] = []  # recommended next steps for the user
    auto_create_task: bool = False  # True when a task should be created automatically
class ResponseSuggestion(BaseModel):
    """AI-generated draft reply for an email."""
    template_type: str  # "acknowledgment", "request_info", "delegation", etc.
    subject: str
    body: str
    confidence: float  # model confidence for this suggestion
# =============================================================================
# Email Template Models
# =============================================================================
class EmailTemplateBase(BaseModel):
    """Base model for a reusable email template."""
    name: str
    category: str  # "acknowledgment", "request", "forwarding", etc.
    subject_template: str  # subject line with placeholder variables
    body_template: str  # body text with placeholder variables
    variables: List[str] = []  # e.g., ["sender_name", "deadline", "topic"]
class EmailTemplateCreate(EmailTemplateBase):
    """Payload for creating a template; identical to the base fields."""
    pass
class EmailTemplate(EmailTemplateBase):
    """Full email template model as persisted."""
    id: str
    user_id: Optional[str] = None  # None = system template
    tenant_id: Optional[str] = None
    is_system: bool = False  # system templates are read-only for users
    usage_count: int = 0  # how often this template has been applied
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction from ORM/DB row objects via attribute access.
        from_attributes = True
# =============================================================================
# Compose Email Models
# =============================================================================
class EmailComposeRequest(BaseModel):
    """Request to compose and send an email from one of the user's accounts."""
    account_id: str = Field(..., description="Account to send from")
    to: List[EmailStr]
    cc: Optional[List[EmailStr]] = []
    bcc: Optional[List[EmailStr]] = []
    subject: str
    body: str
    is_html: bool = False  # True when body is HTML rather than plain text
    reply_to_message_id: Optional[str] = None  # sets threading headers when replying
    attachments: Optional[List[Dict[str, Any]]] = None
class EmailSendResult(BaseModel):
    """Outcome of an email send attempt."""
    success: bool
    message_id: Optional[str] = None  # assigned message ID on success
    error: Optional[str] = None  # error description on failure
# =============================================================================
# Statistics & Health Models
# =============================================================================
class MailStats(BaseModel):
    """Overall mail system statistics across all accounts of a user/tenant."""
    total_accounts: int = 0
    active_accounts: int = 0
    error_accounts: int = 0  # accounts currently in an error state
    total_emails: int = 0
    unread_emails: int = 0
    total_tasks: int = 0
    pending_tasks: int = 0
    overdue_tasks: int = 0
    emails_today: int = 0
    ai_analyses_today: int = 0
    per_account: List[Dict[str, Any]] = []  # per-account breakdown entries
class MailHealthCheck(BaseModel):
    """Health check report for the mail subsystem."""
    status: str  # "healthy", "degraded", "unhealthy"
    database_connected: bool = False
    vault_connected: bool = False  # credential store reachability
    accounts_checked: int = 0
    accounts_healthy: int = 0
    last_sync: Optional[datetime] = None  # timestamp of the most recent IMAP sync
    errors: List[str] = []  # human-readable error messages collected during the check
# =============================================================================
# Helper Functions
# =============================================================================
def generate_id() -> str:
    """Generate a new random UUID (version 4) in canonical string form."""
    return str(uuid.uuid4())
def classify_sender_by_domain(email: str) -> Optional[SenderClassification]:
    """
    Classify a sender by matching the address's domain against the known
    authority domains in KNOWN_AUTHORITIES_NI.

    Args:
        email: The sender's email address.

    Returns:
        A high-confidence SenderClassification when the address's domain is a
        known authority domain (or a subdomain of one), otherwise None.
    """
    email_lower = email.lower()
    # Match only the actual domain part of the address. The previous
    # substring test (`domain in email_lower`) could be triggered by the
    # local part or by look-alike suffix domains such as
    # "mk.niedersachsen.de.evil.example", yielding false authority matches.
    domain_part = email_lower.rsplit("@", 1)[-1]
    for domain, info in KNOWN_AUTHORITIES_NI.items():
        if domain_part == domain or domain_part.endswith("." + domain):
            return SenderClassification(
                sender_type=info["type"],
                authority_name=info["name"],
                confidence=0.95,
                domain_matched=True,
                ai_classified=False,
            )
    return None
def get_priority_from_sender_type(sender_type: SenderType) -> TaskPriority:
    """Suggest a task priority for an email based on who sent it."""
    # State-level school authorities warrant immediate attention.
    if sender_type in (
        SenderType.KULTUSMINISTERIUM,
        SenderType.LANDESSCHULBEHOERDE,
        SenderType.RLSB,
        SenderType.SCHULAMT,
    ):
        return TaskPriority.HIGH
    # Regional bodies and parent representatives are routine but relevant.
    if sender_type in (
        SenderType.NIBIS,
        SenderType.SCHULTRAEGER,
        SenderType.ELTERNVERTRETER,
    ):
        return TaskPriority.MEDIUM
    return TaskPriority.LOW

# =============================================================================
# NOTE(restore): git web-export artifact ("View File" / diff header) removed —
# the following section is the restored Inbox Task Service module.
# =============================================================================
"""
Inbox Task Service (Arbeitsvorrat)
Manages tasks extracted from emails and manual task creation.
"""
import logging
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta
from .models import (
TaskStatus,
TaskPriority,
InboxTask,
TaskCreate,
TaskUpdate,
TaskDashboardStats,
SenderType,
DeadlineExtraction,
)
from .mail_db import (
create_task as db_create_task,
get_tasks as db_get_tasks,
get_task as db_get_task,
update_task as db_update_task,
get_task_dashboard_stats as db_get_dashboard_stats,
get_email,
log_mail_audit,
)
logger = logging.getLogger(__name__)
class TaskService:
    """
    Service for managing inbox tasks (Arbeitsvorrat).

    Features:
    - Create tasks from emails (auto or manual)
    - Track deadlines and priorities
    - Dashboard statistics
    - Reminders (to be integrated with notification service)
    """

    # Explicit urgency ranking for TaskPriority. Comparing enum members with
    # max() orders them by their underlying values — for a str enum that is
    # alphabetical ("high" < "low" < "medium" < "urgent"), which does NOT
    # reflect urgency (e.g. max(LOW, HIGH) would return LOW).
    _PRIORITY_RANK = {
        TaskPriority.LOW: 0,
        TaskPriority.MEDIUM: 1,
        TaskPriority.HIGH: 2,
        TaskPriority.URGENT: 3,
    }

    async def create_task_from_email(
        self,
        user_id: str,
        tenant_id: str,
        email_id: str,
        deadlines: List[DeadlineExtraction],
        sender_type: Optional[SenderType] = None,
        auto_created: bool = False,
    ) -> Optional[str]:
        """
        Create a task from an analyzed email.

        Args:
            user_id: The user ID
            tenant_id: The tenant ID
            email_id: The source email ID
            deadlines: Extracted deadlines
            sender_type: Classified sender type
            auto_created: Whether this was auto-created by AI

        Returns:
            Task ID if created successfully, None when the email is unknown.
        """
        # The source email must exist and belong to the user.
        email_data = await get_email(email_id, user_id)
        if not email_data:
            logger.warning(f"Email not found: {email_id}")
            return None

        # Base priority comes from who sent the email.
        priority = TaskPriority.MEDIUM
        if sender_type:
            priority = self._get_priority_from_sender(sender_type)

        # Use the earliest extracted deadline and let proximity raise priority.
        deadline = None
        if deadlines:
            deadline = min(d.deadline_date for d in deadlines)
            priority = self._adjust_priority_for_deadline(priority, deadline)

        # Derive the task title from the email subject (capped for storage).
        subject = email_data.get("subject", "Keine Betreffzeile")
        title = f"Bearbeiten: {subject[:100]}"

        # Build a markdown description from deadlines and email metadata.
        description = self._build_task_description(deadlines, email_data)

        task_id = await db_create_task(
            user_id=user_id,
            tenant_id=tenant_id,
            title=title,
            description=description,
            priority=priority.value,
            deadline=deadline,
            email_id=email_id,
            account_id=email_data.get("account_id"),
            source_email_subject=subject,
            source_sender=email_data.get("sender_email"),
            source_sender_type=sender_type.value if sender_type else None,
            ai_extracted=auto_created,
            confidence_score=deadlines[0].confidence if deadlines else None,
        )

        if task_id:
            # Audit trail for compliance.
            await log_mail_audit(
                user_id=user_id,
                action="task_created",
                entity_type="task",
                entity_id=task_id,
                details={
                    "email_id": email_id,
                    "auto_created": auto_created,
                    "deadline": deadline.isoformat() if deadline else None,
                },
                tenant_id=tenant_id,
            )
            logger.info(f"Created task {task_id} from email {email_id}")
        return task_id

    async def create_manual_task(
        self,
        user_id: str,
        tenant_id: str,
        task_data: TaskCreate,
    ) -> Optional[str]:
        """
        Create a task manually (not derived by AI from an email).

        Args:
            user_id: The user ID
            tenant_id: The tenant ID
            task_data: Task creation data (may optionally link an email)

        Returns:
            Task ID if created successfully.
        """
        # When linked to an email, denormalize its subject/sender for display.
        source_subject = None
        source_sender = None
        account_id = None
        if task_data.email_id:
            email_data = await get_email(task_data.email_id, user_id)
            if email_data:
                source_subject = email_data.get("subject")
                source_sender = email_data.get("sender_email")
                account_id = email_data.get("account_id")

        task_id = await db_create_task(
            user_id=user_id,
            tenant_id=tenant_id,
            title=task_data.title,
            description=task_data.description,
            priority=task_data.priority.value,
            deadline=task_data.deadline,
            email_id=task_data.email_id,
            account_id=account_id,
            source_email_subject=source_subject,
            source_sender=source_sender,
            ai_extracted=False,
        )

        if task_id:
            await log_mail_audit(
                user_id=user_id,
                action="task_created_manual",
                entity_type="task",
                entity_id=task_id,
                details={"title": task_data.title},
                tenant_id=tenant_id,
            )
        return task_id

    async def get_user_tasks(
        self,
        user_id: str,
        status: Optional[TaskStatus] = None,
        priority: Optional[TaskPriority] = None,
        include_completed: bool = False,
        limit: int = 50,
        offset: int = 0,
    ) -> List[Dict]:
        """
        Get tasks for a user with filtering.

        Args:
            user_id: The user ID
            status: Filter by status (None = any)
            priority: Filter by priority (None = any)
            include_completed: Include completed tasks
            limit: Maximum results
            offset: Pagination offset

        Returns:
            List of task dictionaries.
        """
        return await db_get_tasks(
            user_id=user_id,
            status=status.value if status else None,
            priority=priority.value if priority else None,
            include_completed=include_completed,
            limit=limit,
            offset=offset,
        )

    async def get_task(self, task_id: str, user_id: str) -> Optional[Dict]:
        """Get a single task by ID, scoped to the owning user."""
        return await db_get_task(task_id, user_id)

    async def update_task(
        self,
        task_id: str,
        user_id: str,
        updates: TaskUpdate,
    ) -> bool:
        """
        Update a task with the non-None fields of *updates*.

        Args:
            task_id: The task ID
            user_id: The user ID
            updates: Fields to update (None fields are left unchanged)

        Returns:
            True if successful.
        """
        update_dict = {}
        if updates.title is not None:
            update_dict["title"] = updates.title
        if updates.description is not None:
            update_dict["description"] = updates.description
        if updates.priority is not None:
            update_dict["priority"] = updates.priority.value
        if updates.status is not None:
            update_dict["status"] = updates.status.value
        if updates.deadline is not None:
            update_dict["deadline"] = updates.deadline
        if updates.completed_at is not None:
            # Fix: TaskUpdate exposes completed_at but it was previously never
            # forwarded to the database layer, so the field was silently lost.
            update_dict["completed_at"] = updates.completed_at

        success = await db_update_task(task_id, user_id, **update_dict)
        if success:
            await log_mail_audit(
                user_id=user_id,
                action="task_updated",
                entity_type="task",
                entity_id=task_id,
                details={"updates": update_dict},
            )
        return success

    async def mark_completed(self, task_id: str, user_id: str) -> bool:
        """Mark a task as completed (with audit log entry)."""
        # NOTE(review): only the status is set here; whether completed_at is
        # stamped by the DB layer is not visible from this module — confirm.
        success = await db_update_task(
            task_id, user_id, status=TaskStatus.COMPLETED.value
        )
        if success:
            await log_mail_audit(
                user_id=user_id,
                action="task_completed",
                entity_type="task",
                entity_id=task_id,
            )
        return success

    async def mark_in_progress(self, task_id: str, user_id: str) -> bool:
        """Mark a task as in progress."""
        return await db_update_task(
            task_id, user_id, status=TaskStatus.IN_PROGRESS.value
        )

    async def get_dashboard_stats(self, user_id: str) -> TaskDashboardStats:
        """
        Get dashboard statistics for a user.

        Returns:
            TaskDashboardStats with all metrics (missing keys default to 0/{}).
        """
        stats = await db_get_dashboard_stats(user_id)
        return TaskDashboardStats(
            total_tasks=stats.get("total_tasks", 0),
            pending_tasks=stats.get("pending_tasks", 0),
            in_progress_tasks=stats.get("in_progress_tasks", 0),
            completed_tasks=stats.get("completed_tasks", 0),
            overdue_tasks=stats.get("overdue_tasks", 0),
            due_today=stats.get("due_today", 0),
            due_this_week=stats.get("due_this_week", 0),
            by_priority=stats.get("by_priority", {}),
            by_sender_type=stats.get("by_sender_type", {}),
        )

    async def get_overdue_tasks(self, user_id: str) -> List[Dict]:
        """Get all open tasks whose deadline is already in the past."""
        all_tasks = await db_get_tasks(user_id, include_completed=False, limit=500)
        # NOTE(review): naive local time; if stored deadlines are timezone-aware
        # this comparison will raise — confirm the storage convention.
        now = datetime.now()
        overdue = [
            task for task in all_tasks
            if task.get("deadline") and task["deadline"] < now
        ]
        return overdue

    async def get_tasks_due_soon(
        self,
        user_id: str,
        days: int = 3,
    ) -> List[Dict]:
        """Get open tasks due within the next *days* days, earliest first."""
        all_tasks = await db_get_tasks(user_id, include_completed=False, limit=500)
        # NOTE(review): naive local time — see get_overdue_tasks.
        now = datetime.now()
        deadline_cutoff = now + timedelta(days=days)
        due_soon = [
            task for task in all_tasks
            if task.get("deadline") and now <= task["deadline"] <= deadline_cutoff
        ]
        return sorted(due_soon, key=lambda t: t["deadline"])

    # =========================================================================
    # Helper Methods
    # =========================================================================

    def _get_priority_from_sender(self, sender_type: SenderType) -> TaskPriority:
        """Determine the base priority from the classified sender type.

        NOTE(review): duplicates the module-level get_priority_from_sender_type
        in models — consider delegating to keep the mappings in sync.
        """
        high_priority_senders = {
            SenderType.KULTUSMINISTERIUM,
            SenderType.LANDESSCHULBEHOERDE,
            SenderType.RLSB,
            SenderType.SCHULAMT,
        }
        medium_priority_senders = {
            SenderType.NIBIS,
            SenderType.SCHULTRAEGER,
            SenderType.ELTERNVERTRETER,
        }
        if sender_type in high_priority_senders:
            return TaskPriority.HIGH
        elif sender_type in medium_priority_senders:
            return TaskPriority.MEDIUM
        else:
            return TaskPriority.LOW

    def _adjust_priority_for_deadline(
        self,
        current_priority: TaskPriority,
        deadline: Optional[datetime],
    ) -> TaskPriority:
        """Raise (never lower) the priority when the deadline is close.

        Fix: the previous implementation used max() on TaskPriority members,
        which compares the enums' underlying values rather than urgency and
        could therefore pick the LESS urgent priority. An explicit rank map
        is used instead.
        """
        if deadline is None:
            # No deadline — nothing to escalate (defensive; the caller only
            # invokes this when a deadline was extracted).
            return current_priority
        # NOTE(review): naive local time — see get_overdue_tasks.
        days_until = (deadline - datetime.now()).days
        if days_until <= 1:
            return TaskPriority.URGENT
        if days_until <= 3:
            floor = TaskPriority.HIGH
        elif days_until <= 7:
            floor = TaskPriority.MEDIUM
        else:
            return current_priority
        # Keep whichever of current priority / deadline floor is more urgent.
        rank = self._PRIORITY_RANK
        return current_priority if rank[current_priority] >= rank[floor] else floor

    def _build_task_description(
        self,
        deadlines: List[DeadlineExtraction],
        email_data: Dict,
    ) -> str:
        """Build a markdown task description from deadlines and email data."""
        parts = []
        # List every extracted deadline, flagging binding ones.
        if deadlines:
            parts.append("**Fristen:**")
            for d in deadlines:
                date_str = d.deadline_date.strftime("%d.%m.%Y")
                firm_str = " (verbindlich)" if d.is_firm else ""
                parts.append(f"- {date_str}: {d.description}{firm_str}")
            parts.append("")
        # Sender line.
        sender = email_data.get("sender_email", "Unbekannt")
        parts.append(f"**Von:** {sender}")
        # Truncated body preview, if available.
        preview = email_data.get("body_preview", "")
        if preview:
            parts.append(f"\n**Vorschau:**\n{preview[:300]}...")
        return "\n".join(parts)
# Global instance (lazy singleton; created on first access)
_task_service: Optional[TaskService] = None

def get_task_service() -> TaskService:
    """Get or create the process-wide TaskService instance."""
    global _task_service
    if _task_service is None:
        _task_service = TaskService()
    return _task_service