A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) recovered only some of them.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
"""
|
|
BreakPilot LLM Gateway - Main Application
|
|
|
|
OpenAI-kompatibles API Gateway für Self-hosted LLMs.
|
|
"""
|
|
|
|
import logging
from contextlib import asynccontextmanager

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from .config import get_config
from .routes import (
    chat_router,
    playbooks_router,
    health_router,
    comparison_router,
    edu_search_seeds_router,
    communication_router,
)
from .services.inference import get_inference_service

# Logging Setup
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan management for the gateway."""
    logger.info("Starting LLM Gateway...")
    config = get_config()
    logger.info(f"Debug mode: {config.debug}")
    logger.info(
        f"Backends configured: ollama={bool(config.ollama)}, "
        f"vllm={bool(config.vllm)}, anthropic={bool(config.anthropic)}"
    )

    yield

    # Cleanup
    logger.info("Shutting down LLM Gateway...")
    inference_service = get_inference_service()
    await inference_service.close()


def create_app() -> FastAPI:
    """Factory function for the FastAPI app."""
    config = get_config()

    app = FastAPI(
        title="BreakPilot LLM Gateway",
        description="OpenAI-compatible API gateway for self-hosted LLMs",
        version="0.1.0",
        lifespan=lifespan,
        docs_url="/docs" if config.debug else None,
        redoc_url="/redoc" if config.debug else None,
    )

    # CORS
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],  # restrict in production
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Routes
    app.include_router(health_router)
    app.include_router(chat_router, prefix="/v1")
    app.include_router(playbooks_router)
    app.include_router(comparison_router, prefix="/v1")
    app.include_router(edu_search_seeds_router, prefix="/v1")
    app.include_router(communication_router, prefix="/v1")

    return app


# App instance for uvicorn
app = create_app()


if __name__ == "__main__":
    import uvicorn

    config = get_config()
    uvicorn.run(
        "llm_gateway.main:app",
        host=config.host,
        port=config.port,
        reload=config.debug,
    )
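
Usage sketch (not part of the restored file): since the gateway advertises OpenAI compatibility and mounts chat_router under /v1, a client can presumably talk to it with the standard openai Python SDK pointed at the gateway's host and port. The base URL, API key handling, and model name below are assumptions for illustration, not values confirmed by this commit.

from openai import OpenAI

# Assumed: the gateway is running locally on the host/port that get_config()
# resolves to; the API key is a placeholder (this sketch assumes the gateway
# does not enforce one), and the model name is hypothetical.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="unused")

response = client.chat.completions.create(
    model="llama3",  # hypothetical model routed to a self-hosted backend
    messages=[{"role": "user", "content": "Hello, gateway!"}],
)
print(response.choices[0].message.content)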