Files
breakpilot-lehrer/klausur-service/backend/mail/mail_db.py
Benjamin Boenisch 5a31f52310 Initial commit: breakpilot-lehrer - Lehrer KI Platform
Services: Admin-Lehrer, Backend-Lehrer, Studio v2, Website,
Klausur-Service, School-Service, Voice-Service, Geo-Service,
BreakPilot Drive, Agent-Core

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 23:47:26 +01:00

988 lines
33 KiB
Python

"""
Unified Inbox Mail Database Service
PostgreSQL database operations for multi-account mail aggregation.
"""
import os
import json
import uuid
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta
# Database Configuration - from Vault or environment (test default for CI)
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://test:test@localhost:5432/test")
# Flag to check if using test defaults
# True only when DATABASE_URL was explicitly set (i.e. not the CI fallback above).
_DB_CONFIGURED = DATABASE_URL != "postgresql://test:test@localhost:5432/test"
# Connection pool (shared with metrics_db)
# Lazily created asyncpg pool; populated by get_pool() on first successful call.
_pool: Optional[Any] = None
async def get_pool():
    """Return the shared asyncpg connection pool, creating it lazily.

    Returns:
        The module-level pool, or None when asyncpg is not installed or
        the database is unreachable. A failed attempt leaves ``_pool``
        unset, so a later call retries pool creation.
    """
    global _pool
    if _pool is not None:
        return _pool
    # Import lazily so the module loads even without asyncpg installed.
    try:
        import asyncpg
    except ImportError:
        print("Warning: asyncpg not installed. Mail database disabled.")
        return None
    try:
        _pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)
    except Exception as e:
        print(f"Warning: Failed to connect to PostgreSQL: {e}")
        return None
    return _pool
async def init_mail_tables() -> bool:
    """Initialize mail tables in PostgreSQL.

    Runs one idempotent DDL script (all CREATEs use IF NOT EXISTS) that
    defines the unified-inbox schema: external_email_accounts,
    aggregated_emails, inbox_tasks, email_templates, mail_audit_log and
    mail_sync_status, plus their indexes. Safe to call on every startup.

    Returns:
        True when the script executed; False when no pool is available
        or the DDL failed (the error is printed, never raised).
    """
    pool = await get_pool()
    if pool is None:
        return False
    # Single multi-statement script; executed as one simple query below.
    create_tables_sql = """
    -- =============================================================================
    -- External Email Accounts
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS external_email_accounts (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        email VARCHAR(255) NOT NULL,
        display_name VARCHAR(255),
        account_type VARCHAR(50) DEFAULT 'personal',
        -- IMAP Settings (password stored in Vault)
        imap_host VARCHAR(255) NOT NULL,
        imap_port INTEGER DEFAULT 993,
        imap_ssl BOOLEAN DEFAULT TRUE,
        -- SMTP Settings
        smtp_host VARCHAR(255) NOT NULL,
        smtp_port INTEGER DEFAULT 465,
        smtp_ssl BOOLEAN DEFAULT TRUE,
        -- Vault path for credentials
        vault_path VARCHAR(500),
        -- Status tracking
        status VARCHAR(20) DEFAULT 'pending',
        last_sync TIMESTAMP,
        sync_error TEXT,
        email_count INTEGER DEFAULT 0,
        unread_count INTEGER DEFAULT 0,
        -- Timestamps
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW(),
        -- Constraints
        UNIQUE(user_id, email)
    );
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_user ON external_email_accounts(user_id);
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_tenant ON external_email_accounts(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_mail_accounts_status ON external_email_accounts(status);
    -- =============================================================================
    -- Aggregated Emails
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS aggregated_emails (
        id VARCHAR(36) PRIMARY KEY,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE CASCADE,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        -- Email identification
        message_id VARCHAR(500) NOT NULL,
        folder VARCHAR(100) DEFAULT 'INBOX',
        -- Email content
        subject TEXT,
        sender_email VARCHAR(255),
        sender_name VARCHAR(255),
        recipients JSONB DEFAULT '[]',
        cc JSONB DEFAULT '[]',
        body_preview TEXT,
        body_text TEXT,
        body_html TEXT,
        has_attachments BOOLEAN DEFAULT FALSE,
        attachments JSONB DEFAULT '[]',
        headers JSONB DEFAULT '{}',
        -- Status flags
        is_read BOOLEAN DEFAULT FALSE,
        is_starred BOOLEAN DEFAULT FALSE,
        is_deleted BOOLEAN DEFAULT FALSE,
        -- Dates
        date_sent TIMESTAMP,
        date_received TIMESTAMP,
        -- AI enrichment
        category VARCHAR(50),
        sender_type VARCHAR(50),
        sender_authority_name VARCHAR(255),
        detected_deadlines JSONB DEFAULT '[]',
        suggested_priority VARCHAR(20),
        ai_summary TEXT,
        ai_analyzed_at TIMESTAMP,
        created_at TIMESTAMP DEFAULT NOW(),
        -- Prevent duplicate imports
        UNIQUE(account_id, message_id)
    );
    CREATE INDEX IF NOT EXISTS idx_emails_account ON aggregated_emails(account_id);
    CREATE INDEX IF NOT EXISTS idx_emails_user ON aggregated_emails(user_id);
    CREATE INDEX IF NOT EXISTS idx_emails_tenant ON aggregated_emails(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_emails_date ON aggregated_emails(date_received DESC);
    CREATE INDEX IF NOT EXISTS idx_emails_category ON aggregated_emails(category);
    CREATE INDEX IF NOT EXISTS idx_emails_unread ON aggregated_emails(is_read) WHERE is_read = FALSE;
    CREATE INDEX IF NOT EXISTS idx_emails_starred ON aggregated_emails(is_starred) WHERE is_starred = TRUE;
    CREATE INDEX IF NOT EXISTS idx_emails_sender ON aggregated_emails(sender_email);
    -- =============================================================================
    -- Inbox Tasks (Arbeitsvorrat)
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS inbox_tasks (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36) NOT NULL,
        email_id VARCHAR(36) REFERENCES aggregated_emails(id) ON DELETE SET NULL,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE SET NULL,
        -- Task content
        title VARCHAR(500) NOT NULL,
        description TEXT,
        priority VARCHAR(20) DEFAULT 'medium',
        status VARCHAR(20) DEFAULT 'pending',
        deadline TIMESTAMP,
        -- Source information
        source_email_subject TEXT,
        source_sender VARCHAR(255),
        source_sender_type VARCHAR(50),
        -- AI extraction info
        ai_extracted BOOLEAN DEFAULT FALSE,
        confidence_score FLOAT,
        -- Completion tracking
        completed_at TIMESTAMP,
        reminder_at TIMESTAMP,
        -- Timestamps
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_tasks_user ON inbox_tasks(user_id);
    CREATE INDEX IF NOT EXISTS idx_tasks_tenant ON inbox_tasks(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_tasks_status ON inbox_tasks(status);
    CREATE INDEX IF NOT EXISTS idx_tasks_deadline ON inbox_tasks(deadline) WHERE deadline IS NOT NULL;
    CREATE INDEX IF NOT EXISTS idx_tasks_priority ON inbox_tasks(priority);
    CREATE INDEX IF NOT EXISTS idx_tasks_email ON inbox_tasks(email_id) WHERE email_id IS NOT NULL;
    -- =============================================================================
    -- Email Templates
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS email_templates (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36), -- NULL for system templates
        tenant_id VARCHAR(36),
        name VARCHAR(255) NOT NULL,
        category VARCHAR(100),
        subject_template TEXT,
        body_template TEXT,
        variables JSONB DEFAULT '[]',
        is_system BOOLEAN DEFAULT FALSE,
        usage_count INTEGER DEFAULT 0,
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_templates_user ON email_templates(user_id);
    CREATE INDEX IF NOT EXISTS idx_templates_tenant ON email_templates(tenant_id);
    CREATE INDEX IF NOT EXISTS idx_templates_system ON email_templates(is_system);
    -- =============================================================================
    -- Mail Audit Log
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS mail_audit_log (
        id VARCHAR(36) PRIMARY KEY,
        user_id VARCHAR(36) NOT NULL,
        tenant_id VARCHAR(36),
        action VARCHAR(100) NOT NULL,
        entity_type VARCHAR(50), -- account, email, task
        entity_id VARCHAR(36),
        details JSONB,
        ip_address VARCHAR(45),
        user_agent TEXT,
        created_at TIMESTAMP DEFAULT NOW()
    );
    CREATE INDEX IF NOT EXISTS idx_mail_audit_user ON mail_audit_log(user_id);
    CREATE INDEX IF NOT EXISTS idx_mail_audit_created ON mail_audit_log(created_at DESC);
    CREATE INDEX IF NOT EXISTS idx_mail_audit_action ON mail_audit_log(action);
    -- =============================================================================
    -- Sync Status Tracking
    -- =============================================================================
    CREATE TABLE IF NOT EXISTS mail_sync_status (
        id VARCHAR(36) PRIMARY KEY,
        account_id VARCHAR(36) REFERENCES external_email_accounts(id) ON DELETE CASCADE,
        folder VARCHAR(100),
        last_uid INTEGER DEFAULT 0,
        last_sync TIMESTAMP,
        sync_errors INTEGER DEFAULT 0,
        created_at TIMESTAMP DEFAULT NOW(),
        updated_at TIMESTAMP DEFAULT NOW(),
        UNIQUE(account_id, folder)
    );
    """
    try:
        async with pool.acquire() as conn:
            # asyncpg executes a multi-statement string as one simple query.
            await conn.execute(create_tables_sql)
            print("Mail tables initialized successfully")
            return True
    except Exception as e:
        print(f"Failed to initialize mail tables: {e}")
        return False
# =============================================================================
# Email Account Operations
# =============================================================================
async def create_email_account(
    user_id: str,
    tenant_id: str,
    email: str,
    display_name: str,
    account_type: str,
    imap_host: str,
    imap_port: int,
    imap_ssl: bool,
    smtp_host: str,
    smtp_port: int,
    smtp_ssl: bool,
    vault_path: str,
) -> Optional[str]:
    """Persist a new external email account row.

    Credentials are never stored here; ``vault_path`` points at them in
    Vault. Returns the generated account id, or None when the database is
    unavailable or the insert fails (e.g. duplicate (user_id, email)).
    """
    pool = await get_pool()
    if pool is None:
        return None
    account_id = str(uuid.uuid4())
    row_values = (
        account_id, user_id, tenant_id, email, display_name, account_type,
        imap_host, imap_port, imap_ssl, smtp_host, smtp_port, smtp_ssl, vault_path,
    )
    insert_sql = """
    INSERT INTO external_email_accounts
    (id, user_id, tenant_id, email, display_name, account_type,
     imap_host, imap_port, imap_ssl, smtp_host, smtp_port, smtp_ssl, vault_path)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(insert_sql, *row_values)
        return account_id
    except Exception as e:
        print(f"Failed to create email account: {e}")
        return None
async def get_email_accounts(
    user_id: str,
    tenant_id: Optional[str] = None,
) -> List[Dict]:
    """List a user's email accounts, oldest first.

    When ``tenant_id`` is provided, results are additionally scoped to
    that tenant. Returns [] when the pool is unavailable or on error.
    """
    pool = await get_pool()
    if pool is None:
        return []
    # Pick the query/arguments pair up front; one fetch path below.
    if tenant_id:
        sql = """
        SELECT * FROM external_email_accounts
        WHERE user_id = $1 AND tenant_id = $2
        ORDER BY created_at
        """
        args = (user_id, tenant_id)
    else:
        sql = """
        SELECT * FROM external_email_accounts
        WHERE user_id = $1
        ORDER BY created_at
        """
        args = (user_id,)
    try:
        async with pool.acquire() as conn:
            rows = await conn.fetch(sql, *args)
        return [dict(r) for r in rows]
    except Exception as e:
        print(f"Failed to get email accounts: {e}")
        return []
async def get_email_account(account_id: str, user_id: str) -> Optional[Dict]:
    """Fetch one email account owned by the given user.

    Returns the row as a dict, or None when no match exists or the
    database is unavailable/errored.
    """
    pool = await get_pool()
    if pool is None:
        return None
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(
                """
                SELECT * FROM external_email_accounts
                WHERE id = $1 AND user_id = $2
                """,
                account_id, user_id
            )
    except Exception as e:
        print(f"Failed to get email account: {e}")
        return None
    if record is None:
        return None
    return dict(record)
async def update_account_status(
    account_id: str,
    status: str,
    sync_error: Optional[str] = None,
    email_count: Optional[int] = None,
    unread_count: Optional[int] = None,
) -> bool:
    """Record the outcome of a sync run on an account.

    Counts passed as None keep their current values (COALESCE in SQL);
    ``last_sync`` and ``updated_at`` are always bumped to NOW().
    Returns True on success, False on failure.
    """
    pool = await get_pool()
    if pool is None:
        return False
    update_sql = """
    UPDATE external_email_accounts SET
        status = $2,
        sync_error = $3,
        email_count = COALESCE($4, email_count),
        unread_count = COALESCE($5, unread_count),
        last_sync = NOW(),
        updated_at = NOW()
    WHERE id = $1
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                update_sql, account_id, status, sync_error, email_count, unread_count
            )
        return True
    except Exception as e:
        print(f"Failed to update account status: {e}")
        return False
async def delete_email_account(account_id: str, user_id: str) -> bool:
    """Delete an email account (cascades to its aggregated emails).

    Returns:
        True only when a row was actually removed; False when the account
        does not exist, belongs to another user, or the database fails.

    Fix: the previous check was ``"DELETE" in result``, which is also True
    for the command tag "DELETE 0" (no matching row), so deleting a
    nonexistent or foreign account falsely reported success. ``id`` is the
    primary key, so at most one row can match — compare the exact tag.
    """
    pool = await get_pool()
    if pool is None:
        return False
    try:
        async with pool.acquire() as conn:
            result = await conn.execute(
                """
                DELETE FROM external_email_accounts
                WHERE id = $1 AND user_id = $2
                """,
                account_id, user_id
            )
        # asyncpg returns the command tag, e.g. "DELETE 1" / "DELETE 0".
        return result == "DELETE 1"
    except Exception as e:
        print(f"Failed to delete email account: {e}")
        return False
# =============================================================================
# Aggregated Email Operations
# =============================================================================
async def upsert_email(
    account_id: str,
    user_id: str,
    tenant_id: str,
    message_id: str,
    subject: str,
    sender_email: str,
    sender_name: Optional[str],
    recipients: List[str],
    cc: List[str],
    body_preview: Optional[str],
    body_text: Optional[str],
    body_html: Optional[str],
    has_attachments: bool,
    attachments: List[Dict],
    headers: Dict,
    folder: str,
    date_sent: datetime,
    date_received: datetime,
) -> Optional[str]:
    """Insert or update an email. Returns the email ID (None on failure).

    Deduplicates via the (account_id, message_id) unique constraint: on a
    re-sync of an already-imported message, only subject and folder are
    refreshed and the existing row's id is returned.

    Fix: the ON CONFLICT clause previously also set
    ``is_read = EXCLUDED.is_read``. The INSERT never supplies is_read, so
    EXCLUDED.is_read was always the column default (FALSE) and every
    re-sync silently reset the user's read flags. The stored is_read value
    is now preserved on conflict.
    """
    pool = await get_pool()
    if pool is None:
        return None
    email_id = str(uuid.uuid4())
    try:
        async with pool.acquire() as conn:
            row = await conn.fetchrow(
                """
                INSERT INTO aggregated_emails
                (id, account_id, user_id, tenant_id, message_id, subject,
                 sender_email, sender_name, recipients, cc, body_preview,
                 body_text, body_html, has_attachments, attachments, headers,
                 folder, date_sent, date_received)
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
                ON CONFLICT (account_id, message_id) DO UPDATE SET
                    subject = EXCLUDED.subject,
                    folder = EXCLUDED.folder
                RETURNING id
                """,
                email_id, account_id, user_id, tenant_id, message_id, subject,
                sender_email, sender_name, json.dumps(recipients), json.dumps(cc),
                body_preview, body_text, body_html, has_attachments,
                json.dumps(attachments), json.dumps(headers), folder,
                date_sent, date_received
            )
            # RETURNING id yields the existing row's id on conflict, the
            # freshly generated one on insert.
            return row['id'] if row else None
    except Exception as e:
        print(f"Failed to upsert email: {e}")
        return None
async def get_unified_inbox(
    user_id: str,
    account_ids: Optional[List[str]] = None,
    categories: Optional[List[str]] = None,
    is_read: Optional[bool] = None,
    is_starred: Optional[bool] = None,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict]:
    """Get the unified inbox across all of a user's accounts, newest first.

    Optional filters: account ids, AI categories, read flag, starred flag.
    Each row is the email joined with its account's address/display name.
    Returns [] when the pool is unavailable or the query fails.

    Fix: filter columns are now qualified with the ``e.`` alias. The query
    joins aggregated_emails with external_email_accounts and BOTH tables
    have a user_id column, so the previous unqualified ``user_id = $1``
    raised "column reference is ambiguous" in PostgreSQL and this function
    always fell into the except branch, returning [].
    """
    pool = await get_pool()
    if pool is None:
        return []
    try:
        async with pool.acquire() as conn:
            # Build the WHERE clause dynamically; qualify every column to
            # avoid ambiguity with the joined accounts table.
            conditions = ["e.user_id = $1", "e.is_deleted = FALSE"]
            params: List[Any] = [user_id]
            param_idx = 2
            if account_ids:
                conditions.append(f"e.account_id = ANY(${param_idx})")
                params.append(account_ids)
                param_idx += 1
            if categories:
                conditions.append(f"e.category = ANY(${param_idx})")
                params.append(categories)
                param_idx += 1
            if is_read is not None:
                conditions.append(f"e.is_read = ${param_idx}")
                params.append(is_read)
                param_idx += 1
            if is_starred is not None:
                conditions.append(f"e.is_starred = ${param_idx}")
                params.append(is_starred)
                param_idx += 1
            where_clause = " AND ".join(conditions)
            params.extend([limit, offset])
            query = f"""
            SELECT e.*, a.email as account_email, a.display_name as account_name
            FROM aggregated_emails e
            JOIN external_email_accounts a ON e.account_id = a.id
            WHERE {where_clause}
            ORDER BY e.date_received DESC
            LIMIT ${param_idx} OFFSET ${param_idx + 1}
            """
            rows = await conn.fetch(query, *params)
            return [dict(r) for r in rows]
    except Exception as e:
        print(f"Failed to get unified inbox: {e}")
        return []
async def get_email(email_id: str, user_id: str) -> Optional[Dict]:
    """Load one email, joined with its account's address and display name.

    Returns a dict, or None when no match exists or the database fails.
    """
    pool = await get_pool()
    if pool is None:
        return None
    select_sql = """
    SELECT e.*, a.email as account_email, a.display_name as account_name
    FROM aggregated_emails e
    JOIN external_email_accounts a ON e.account_id = a.id
    WHERE e.id = $1 AND e.user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(select_sql, email_id, user_id)
        return None if record is None else dict(record)
    except Exception as e:
        print(f"Failed to get email: {e}")
        return None
async def update_email_ai_analysis(
    email_id: str,
    category: str,
    sender_type: str,
    sender_authority_name: Optional[str],
    detected_deadlines: List[Dict],
    suggested_priority: str,
    ai_summary: Optional[str],
) -> bool:
    """Store AI enrichment results on an email and stamp ai_analyzed_at.

    ``detected_deadlines`` is serialized to JSON for the JSONB column.
    Returns True on success, False on failure.
    """
    pool = await get_pool()
    if pool is None:
        return False
    update_sql = """
    UPDATE aggregated_emails SET
        category = $2,
        sender_type = $3,
        sender_authority_name = $4,
        detected_deadlines = $5,
        suggested_priority = $6,
        ai_summary = $7,
        ai_analyzed_at = NOW()
    WHERE id = $1
    """
    args = (
        email_id, category, sender_type, sender_authority_name,
        json.dumps(detected_deadlines), suggested_priority, ai_summary,
    )
    try:
        async with pool.acquire() as conn:
            await conn.execute(update_sql, *args)
        return True
    except Exception as e:
        print(f"Failed to update email AI analysis: {e}")
        return False
async def mark_email_read(email_id: str, user_id: str, is_read: bool = True) -> bool:
    """Set (or clear, with is_read=False) the read flag on a user's email.

    Returns True unless the database is unavailable or the update errors.
    """
    pool = await get_pool()
    if pool is None:
        return False
    flag_sql = """
    UPDATE aggregated_emails SET is_read = $3
    WHERE id = $1 AND user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(flag_sql, email_id, user_id, is_read)
        return True
    except Exception as e:
        print(f"Failed to mark email read: {e}")
        return False
async def mark_email_starred(email_id: str, user_id: str, is_starred: bool = True) -> bool:
    """Set (or clear, with is_starred=False) the star flag on a user's email.

    Returns True unless the database is unavailable or the update errors.
    """
    pool = await get_pool()
    if pool is None:
        return False
    flag_sql = """
    UPDATE aggregated_emails SET is_starred = $3
    WHERE id = $1 AND user_id = $2
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(flag_sql, email_id, user_id, is_starred)
        return True
    except Exception as e:
        print(f"Failed to mark email starred: {e}")
        return False
# =============================================================================
# Inbox Task Operations
# =============================================================================
async def create_task(
    user_id: str,
    tenant_id: str,
    title: str,
    description: Optional[str] = None,
    priority: str = "medium",
    deadline: Optional[datetime] = None,
    email_id: Optional[str] = None,
    account_id: Optional[str] = None,
    source_email_subject: Optional[str] = None,
    source_sender: Optional[str] = None,
    source_sender_type: Optional[str] = None,
    ai_extracted: bool = False,
    confidence_score: Optional[float] = None,
) -> Optional[str]:
    """Insert a new inbox task and return its generated id.

    ``email_id``/``account_id`` link the task back to the email it was
    derived from; ``ai_extracted``/``confidence_score`` mark tasks
    proposed by the AI pipeline. Returns None on failure.
    """
    pool = await get_pool()
    if pool is None:
        return None
    task_id = str(uuid.uuid4())
    row_values = (
        task_id, user_id, tenant_id, title, description, priority, deadline,
        email_id, account_id, source_email_subject, source_sender,
        source_sender_type, ai_extracted, confidence_score,
    )
    insert_sql = """
    INSERT INTO inbox_tasks
    (id, user_id, tenant_id, title, description, priority, deadline,
     email_id, account_id, source_email_subject, source_sender,
     source_sender_type, ai_extracted, confidence_score)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(insert_sql, *row_values)
        return task_id
    except Exception as e:
        print(f"Failed to create task: {e}")
        return None
async def get_tasks(
    user_id: str,
    status: Optional[str] = None,
    priority: Optional[str] = None,
    include_completed: bool = False,
    limit: int = 50,
    offset: int = 0,
) -> List[Dict]:
    """List a user's tasks, most urgent first.

    Sort order: priority (urgent > high > medium > low), then earliest
    deadline (NULLs last), then newest first. Completed tasks are hidden
    unless ``include_completed`` is True. Returns [] on failure.
    """
    pool = await get_pool()
    if pool is None:
        return []
    filters = ["user_id = $1"]
    args: List[Any] = [user_id]
    if not include_completed:
        filters.append("status != 'completed'")
    # Optional equality filters; placeholder index tracks the args list.
    for column, value in (("status", status), ("priority", priority)):
        if value:
            args.append(value)
            filters.append(f"{column} = ${len(args)}")
    where_clause = " AND ".join(filters)
    limit_idx = len(args) + 1
    args.extend([limit, offset])
    query = f"""
    SELECT * FROM inbox_tasks
    WHERE {where_clause}
    ORDER BY
        CASE priority
            WHEN 'urgent' THEN 1
            WHEN 'high' THEN 2
            WHEN 'medium' THEN 3
            WHEN 'low' THEN 4
        END,
        deadline ASC NULLS LAST,
        created_at DESC
    LIMIT ${limit_idx} OFFSET ${limit_idx + 1}
    """
    try:
        async with pool.acquire() as conn:
            rows = await conn.fetch(query, *args)
        return [dict(r) for r in rows]
    except Exception as e:
        print(f"Failed to get tasks: {e}")
        return []
async def get_task(task_id: str, user_id: str) -> Optional[Dict]:
    """Fetch a single task owned by the user; None when absent or on error."""
    pool = await get_pool()
    if pool is None:
        return None
    try:
        async with pool.acquire() as conn:
            record = await conn.fetchrow(
                "SELECT * FROM inbox_tasks WHERE id = $1 AND user_id = $2",
                task_id, user_id
            )
    except Exception as e:
        print(f"Failed to get task: {e}")
        return None
    return dict(record) if record is not None else None
async def update_task(
    task_id: str,
    user_id: str,
    title: Optional[str] = None,
    description: Optional[str] = None,
    priority: Optional[str] = None,
    status: Optional[str] = None,
    deadline: Optional[datetime] = None,
) -> bool:
    """Partially update a task; parameters left as None are untouched.

    Setting status to 'completed' also stamps completed_at. Because None
    means "unchanged", fields cannot be cleared through this function.
    ``updated_at`` is always refreshed. Returns True on success.
    """
    pool = await get_pool()
    if pool is None:
        return False
    assignments = ["updated_at = NOW()"]
    args: List[Any] = [task_id, user_id]

    def bind(column: str, value: Any) -> None:
        # Append the value and reference it by its positional placeholder.
        args.append(value)
        assignments.append(f"{column} = ${len(args)}")

    if title is not None:
        bind("title", title)
    if description is not None:
        bind("description", description)
    if priority is not None:
        bind("priority", priority)
    if status is not None:
        bind("status", status)
        if status == "completed":
            assignments.append("completed_at = NOW()")
    if deadline is not None:
        bind("deadline", deadline)
    try:
        async with pool.acquire() as conn:
            await conn.execute(
                f"UPDATE inbox_tasks SET {', '.join(assignments)} WHERE id = $1 AND user_id = $2",
                *args
            )
        return True
    except Exception as e:
        print(f"Failed to update task: {e}")
        return False
async def get_task_dashboard_stats(user_id: str) -> Dict:
    """Aggregate task statistics for the dashboard view.

    Returns total/status counts, deadline buckets (overdue, due today,
    due this week), plus open-task breakdowns by priority and by source
    sender type. Returns {} when the database is unavailable or errors.
    """
    pool = await get_pool()
    if pool is None:
        return {}
    try:
        async with pool.acquire() as conn:
            # NOTE(review): naive local datetimes are compared against
            # TIMESTAMP columns written with NOW() — assumes the app server
            # and the database share a timezone; confirm in deployment.
            now = datetime.now()
            today_end = now.replace(hour=23, minute=59, second=59)
            week_end = now + timedelta(days=7)
            stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_tasks,
                    COUNT(*) FILTER (WHERE status = 'pending') as pending_tasks,
                    COUNT(*) FILTER (WHERE status = 'in_progress') as in_progress_tasks,
                    COUNT(*) FILTER (WHERE status = 'completed') as completed_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline < $2) as overdue_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline <= $3) as due_today,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline <= $4) as due_this_week
                FROM inbox_tasks
                WHERE user_id = $1
                """,
                user_id, now, today_end, week_end
            )
            by_priority = await conn.fetch(
                """
                SELECT priority, COUNT(*) as count
                FROM inbox_tasks
                WHERE user_id = $1 AND status != 'completed'
                GROUP BY priority
                """,
                user_id
            )
            by_sender = await conn.fetch(
                """
                SELECT source_sender_type, COUNT(*) as count
                FROM inbox_tasks
                WHERE user_id = $1 AND status != 'completed' AND source_sender_type IS NOT NULL
                GROUP BY source_sender_type
                """,
                user_id
            )
        result: Dict[str, Any] = {
            key: stats[key] or 0
            for key in (
                "total_tasks", "pending_tasks", "in_progress_tasks",
                "completed_tasks", "overdue_tasks", "due_today", "due_this_week",
            )
        }
        result["by_priority"] = {r['priority']: r['count'] for r in by_priority}
        result["by_sender_type"] = {r['source_sender_type']: r['count'] for r in by_sender}
        return result
    except Exception as e:
        print(f"Failed to get task stats: {e}")
        return {}
# =============================================================================
# Statistics & Audit
# =============================================================================
async def get_mail_stats(user_id: str) -> Dict:
    """Get overall mail statistics for a user.

    Combines per-account health (status, counts, last sync), aggregate
    email counts (total / unread / received today), AI-analysis activity,
    and task counts into one dashboard payload. Returns {} when the pool
    is unavailable or any of the three queries fails.
    """
    pool = await get_pool()
    if pool is None:
        return {}
    try:
        async with pool.acquire() as conn:
            # Midnight today, naive local time.
            # NOTE(review): compared against TIMESTAMP columns written with
            # NOW() — assumes app server and DB share a timezone; confirm.
            today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            # Account stats
            accounts = await conn.fetch(
                """
                SELECT id, email, display_name, status, email_count, unread_count, last_sync
                FROM external_email_accounts
                WHERE user_id = $1
                """,
                user_id
            )
            # Email counts
            email_stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_emails,
                    COUNT(*) FILTER (WHERE is_read = FALSE) as unread_emails,
                    COUNT(*) FILTER (WHERE date_received >= $2) as emails_today,
                    COUNT(*) FILTER (WHERE ai_analyzed_at >= $2) as ai_analyses_today
                FROM aggregated_emails
                WHERE user_id = $1
                """,
                user_id, today
            )
            # Task counts
            task_stats = await conn.fetchrow(
                """
                SELECT
                    COUNT(*) as total_tasks,
                    COUNT(*) FILTER (WHERE status = 'pending') as pending_tasks,
                    COUNT(*) FILTER (WHERE status != 'completed' AND deadline < NOW()) as overdue_tasks
                FROM inbox_tasks
                WHERE user_id = $1
                """,
                user_id
            )
            # `or 0` normalizes any NULL/None values from the rows.
            return {
                "total_accounts": len(accounts),
                "active_accounts": sum(1 for a in accounts if a['status'] == 'active'),
                "error_accounts": sum(1 for a in accounts if a['status'] == 'error'),
                "total_emails": email_stats['total_emails'] or 0,
                "unread_emails": email_stats['unread_emails'] or 0,
                "total_tasks": task_stats['total_tasks'] or 0,
                "pending_tasks": task_stats['pending_tasks'] or 0,
                "overdue_tasks": task_stats['overdue_tasks'] or 0,
                "emails_today": email_stats['emails_today'] or 0,
                "ai_analyses_today": email_stats['ai_analyses_today'] or 0,
                "per_account": [
                    {
                        "id": a['id'],
                        "email": a['email'],
                        "display_name": a['display_name'],
                        "status": a['status'],
                        "email_count": a['email_count'],
                        "unread_count": a['unread_count'],
                        # last_sync is NULL before the first successful sync.
                        "last_sync": a['last_sync'].isoformat() if a['last_sync'] else None,
                    }
                    for a in accounts
                ],
            }
    except Exception as e:
        print(f"Failed to get mail stats: {e}")
        return {}
async def log_mail_audit(
    user_id: str,
    action: str,
    entity_type: Optional[str] = None,
    entity_id: Optional[str] = None,
    details: Optional[Dict] = None,
    tenant_id: Optional[str] = None,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
) -> bool:
    """Append an entry to the mail audit trail.

    Best-effort: a failure is printed and reported as False, never raised.
    ``details`` (if given) is serialized to JSON for the JSONB column.
    """
    pool = await get_pool()
    if pool is None:
        return False
    entry = (
        str(uuid.uuid4()), user_id, tenant_id, action, entity_type,
        entity_id, json.dumps(details) if details else None,
        ip_address, user_agent,
    )
    insert_sql = """
    INSERT INTO mail_audit_log
    (id, user_id, tenant_id, action, entity_type, entity_id, details, ip_address, user_agent)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
    """
    try:
        async with pool.acquire() as conn:
            await conn.execute(insert_sql, *entry)
        return True
    except Exception as e:
        print(f"Failed to log mail audit: {e}")
        return False