This repository was archived on 2026-02-15. You can view and clone its files, but you cannot open issues, create pull requests, or push commits.
Files
breakpilot-pwa/backend/session/cleanup_job.py
Benjamin Admin 21a844cb8a fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 09:51:32 +01:00

142 lines
3.4 KiB
Python

"""
Session Cleanup Job
Removes expired sessions from PostgreSQL.
Valkey handles its own expiry via TTL.
This job should be run periodically (e.g., via cron or APScheduler).
Usage:
# Run directly
python -m session.cleanup_job
# Or import and call
from session.cleanup_job import run_cleanup
await run_cleanup()
"""
import asyncio
import logging
import os
from datetime import datetime, timezone
logger = logging.getLogger(__name__)
async def run_cleanup():
    """Run the session cleanup job via the shared session store.

    Returns:
        int: Number of expired sessions removed.

    Raises:
        Exception: Re-raised after logging so a scheduler or caller can
            observe the failure instead of it being silently swallowed.
    """
    # Imported lazily to avoid a circular import at module load time.
    from .session_store import get_session_store

    logger.info("Starting session cleanup job...")
    try:
        store = await get_session_store()
        count = await store.cleanup_expired_sessions()
        # Lazy %-style args: formatting is skipped when INFO is disabled.
        logger.info("Session cleanup completed: removed %s expired sessions", count)
        return count
    except Exception as e:
        # logger.exception records the full traceback, not just str(e).
        logger.exception("Session cleanup failed: %s", e)
        raise
async def run_cleanup_with_pg():
    """
    Run cleanup directly against PostgreSQL, bypassing the session store.

    Useful when the session store is not initialized (e.g. a standalone
    cron invocation). Deliberately best-effort: any failure is logged
    (with traceback) and 0 is returned rather than raised.

    Returns:
        int: Number of rows deleted; 0 when skipped or on failure.
    """
    database_url = os.environ.get("DATABASE_URL")
    if not database_url:
        logger.warning("DATABASE_URL not set, skipping cleanup")
        return 0

    try:
        # Imported lazily so the module still loads when asyncpg is absent.
        import asyncpg

        conn = await asyncpg.connect(database_url)
        try:
            # Keep a 7-day grace window: only purge sessions that expired
            # more than 7 days ago, so recent ones remain inspectable.
            result = await conn.execute("""
                DELETE FROM user_sessions
                WHERE expires_at < NOW() - INTERVAL '7 days'
            """)
            # asyncpg returns a status tag such as "DELETE 42"; the last
            # whitespace-separated token is the affected-row count.
            count = int(result.split()[-1]) if result else 0
            logger.info("Session cleanup completed: removed %s expired sessions", count)
            return count
        finally:
            await conn.close()
    except Exception as e:
        # Best-effort job: log with traceback but do not propagate.
        logger.exception("Session cleanup failed: %s", e)
        return 0
def setup_scheduler():
    """
    Start an APScheduler instance that runs ``run_cleanup`` every 6 hours.

    Returns:
        The started ``AsyncIOScheduler``, or ``None`` when APScheduler is
        not installed (the job is then simply not scheduled).
    """
    # Only the imports can raise ImportError; keep the try block minimal.
    try:
        from apscheduler.schedulers.asyncio import AsyncIOScheduler
        from apscheduler.triggers.interval import IntervalTrigger
    except ImportError:
        logger.warning("APScheduler not installed, cleanup job not scheduled")
        return None

    scheduler = AsyncIOScheduler()
    scheduler.add_job(
        run_cleanup,
        trigger=IntervalTrigger(hours=6),
        id="session_cleanup",
        name="Session Cleanup Job",
        replace_existing=True,
    )
    scheduler.start()
    logger.info("Session cleanup scheduler started (runs every 6 hours)")
    return scheduler
def register_with_fastapi(app):
    """
    Build a FastAPI lifespan context manager wiring in the cleanup job.

    Note: the returned ``lifespan`` must still be attached by the caller
    (e.g. ``FastAPI(lifespan=register_with_fastapi(app))``); this function
    does not mutate ``app`` itself.

    Usage:
        from session.cleanup_job import register_with_fastapi
        register_with_fastapi(app)

    Args:
        app: FastAPI application (currently unused; kept for API stability).

    Returns:
        An async context manager suitable as a FastAPI ``lifespan``.
    """
    from contextlib import asynccontextmanager

    scheduler = None

    @asynccontextmanager
    async def lifespan(app):
        nonlocal scheduler
        # Startup: schedule periodic cleanup (no-op if APScheduler missing).
        scheduler = setup_scheduler()

        # Run an initial cleanup in the background. Keep a reference so the
        # task is not garbage-collected mid-run (asyncio holds only a weak
        # reference), and attach a callback so a failure is logged instead
        # of being silently dropped.
        def _report(t):
            if not t.cancelled() and t.exception() is not None:
                logger.error(f"Initial session cleanup failed: {t.exception()}")

        task = asyncio.create_task(run_cleanup())
        task.add_done_callback(_report)

        yield
        # Shutdown: stop the scheduler if one was started.
        if scheduler:
            scheduler.shutdown()

    return lifespan
if __name__ == "__main__":
    # Standalone entry point: configure root logging, then perform a single
    # cleanup pass directly against PostgreSQL (no session store required).
    logging.basicConfig(
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        level=logging.INFO,
    )
    asyncio.run(run_cleanup_with_pg())