fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
91
backend/api/tests/database.py
Normal file
91
backend/api/tests/database.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
Database Configuration fuer Test Registry.
|
||||
|
||||
PostgreSQL-Anbindung fuer persistente Test-Speicherung.
|
||||
Ersetzt die bisherige JSON-basierte Speicherung.
|
||||
"""
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker, Session, declarative_base
|
||||
|
||||
# Eigene Base fuer Test Registry - unabhaengig von anderen Modulen
|
||||
# Dies vermeidet Import-Probleme in CI/CD Umgebungen
|
||||
Base = declarative_base()
|
||||
|
||||
# Database URL from environment (nutzt gleiche DB wie Backend)
|
||||
_raw_url = os.getenv(
|
||||
"DATABASE_URL",
|
||||
"postgresql://breakpilot:breakpilot123@postgres:5432/breakpilot_db"
|
||||
)
|
||||
# SQLAlchemy 2.0 erfordert "postgresql://" statt "postgres://"
|
||||
DATABASE_URL = _raw_url.replace("postgres://", "postgresql://", 1) if _raw_url.startswith("postgres://") else _raw_url
|
||||
|
||||
# Engine configuration mit Connection Pool
|
||||
engine = create_engine(
|
||||
DATABASE_URL,
|
||||
pool_pre_ping=True, # Prueft Connections vor Nutzung
|
||||
pool_size=5, # Standard Pool-Groesse
|
||||
max_overflow=10, # Zusaetzliche Connections bei Bedarf
|
||||
pool_recycle=3600, # Recycle nach 1 Stunde
|
||||
echo=os.getenv("SQL_ECHO", "false").lower() == "true"
|
||||
)
|
||||
|
||||
# Session factory
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
|
||||
def get_db():
    """FastAPI dependency that yields a database session.

    The session is always closed after the request finishes, even when
    the endpoint raises.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
|
||||
|
||||
@contextmanager
def get_db_session():
    """Context manager yielding a database session for non-FastAPI code.

    Commits when the block exits cleanly, rolls back and re-raises on any
    exception (including commit failures), and always closes the session.

    Example:
        with get_db_session() as db:
            db.query(TestRun).all()
    """
    session = SessionLocal()
    try:
        yield session
        # Commit inside the try so that a failing commit is also rolled back.
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
|
||||
def init_db():
    """Create all Test Registry tables on the configured engine.

    Intended for development and tests; production schema changes should
    go through Alembic migrations instead.
    """
    # Importing the models module registers its tables on Base.metadata.
    from . import db_models  # noqa: F401
    Base.metadata.create_all(bind=engine)
|
||||
|
||||
|
||||
def check_db_connection() -> bool:
    """Return True when a trivial query against the database succeeds.

    Intended for health checks: every failure mode is reported as False
    rather than propagated to the caller.
    """
    try:
        from sqlalchemy import text
        with get_db_session() as session:
            session.execute(text("SELECT 1"))
    except Exception:
        return False
    return True
|
||||
Reference in New Issue
Block a user