This repository has been archived on 2026-02-15. You can view files and clone it, but you cannot open issues, create pull requests, or push commits.
Files
breakpilot-pwa/backend/llm_gateway/routes/health.py
Benjamin Admin 21a844cb8a fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 09:51:32 +01:00

128 lines
3.9 KiB
Python

"""
Health Check Route.
"""
import logging
from datetime import datetime, timezone

from fastapi import APIRouter
from pydantic import BaseModel

from ..config import get_config
logger = logging.getLogger(__name__)
router = APIRouter(tags=["Health"])
class ComponentStatus(BaseModel):
    """Health status of a single gateway component."""
    # Identifier of the checked component, e.g. "gateway", "ollama", "vllm".
    name: str
    status: str  # one of: "healthy", "degraded", "unhealthy"
    # Optional human-readable detail; empty when there is nothing to report.
    message: str = ""
class HealthResponse(BaseModel):
    """Aggregated response returned by the /health endpoint."""
    status: str  # overall status: "ok", "degraded", or "error"
    # ISO-8601 UTC timestamp of when the check ran (suffixed with "Z").
    ts: str
    # Gateway version string.
    version: str
    # Per-component statuses; always includes the gateway itself.
    components: list[ComponentStatus]
async def _probe_http_backend(name: str, label: str, url: str, headers: dict | None = None) -> ComponentStatus:
    """Probe an HTTP backend with a 5s GET and map the outcome to a ComponentStatus.

    Args:
        name: Component identifier used in the response (e.g. "ollama").
        label: Display label used in messages (e.g. "Ollama") — kept separate
            so the exact wording of the original messages is preserved.
        url: Full URL to GET.
        headers: Optional request headers (e.g. an Authorization bearer token).

    Returns:
        ComponentStatus with status "healthy" (HTTP 200), "degraded"
        (non-200 response), or "unhealthy" (request raised).
    """
    # Imported lazily so the module loads even if httpx is absent;
    # a missing httpx then surfaces as an "unhealthy" component.
    try:
        import httpx
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(url, headers=headers or {})
        if response.status_code == 200:
            return ComponentStatus(
                name=name,
                status="healthy",
                message=f"{label} is reachable",
            )
        return ComponentStatus(
            name=name,
            status="degraded",
            message=f"{label} returned status {response.status_code}",
        )
    except Exception as e:
        return ComponentStatus(
            name=name,
            status="unhealthy",
            message=f"Cannot reach {label}: {str(e)}",
        )


@router.get("/health", response_model=HealthResponse)
async def health_check():
    """
    Health Check Endpoint.

    Checks the status of all components:
    - the gateway itself
    - LLM backend reachability (Ollama, vLLM)
    - Anthropic API (configured-only, not probed)

    The overall status is "ok" when everything is healthy, "degraded" when
    some backend is impaired but others remain available, and "error" when
    no LLM backend is reachable at all.
    """
    config = get_config()
    overall_status = "ok"

    # Gateway itself is trivially healthy if this handler runs.
    components = [ComponentStatus(
        name="gateway",
        status="healthy",
        message="Gateway is running",
    )]

    ollama_enabled = bool(config.ollama and config.ollama.enabled)
    vllm_enabled = bool(config.vllm and config.vllm.enabled)
    anthropic_enabled = bool(config.anthropic and config.anthropic.enabled)

    # Ollama backend.
    if ollama_enabled:
        comp = await _probe_http_backend(
            "ollama", "Ollama", f"{config.ollama.base_url}/api/tags",
        )
        components.append(comp)
        if comp.status == "degraded":
            overall_status = "degraded"
        elif comp.status == "unhealthy":
            # Only fatal when no other backend can take over; otherwise the
            # gateway is still serviceable, but no longer fully healthy.
            if not (vllm_enabled or anthropic_enabled):
                overall_status = "error"
            else:
                overall_status = "degraded"

    # vLLM backend (OpenAI-compatible API, optionally behind an API key).
    if vllm_enabled:
        headers = {}
        if config.vllm.api_key:
            headers["Authorization"] = f"Bearer {config.vllm.api_key}"
        comp = await _probe_http_backend(
            "vllm", "vLLM", f"{config.vllm.base_url}/v1/models", headers,
        )
        components.append(comp)
        # Any vLLM impairment downgrades the overall status (previously an
        # unreachable vLLM left the status at "ok"), but never upgrades
        # an already-fatal "error".
        if comp.status != "healthy" and overall_status != "error":
            overall_status = "degraded"

    # Anthropic backend: remote SaaS, reported as configured but not probed.
    if anthropic_enabled:
        components.append(ComponentStatus(
            name="anthropic",
            status="healthy",
            message="Anthropic API configured (not checked)",
        ))

    return HealthResponse(
        status=overall_status,
        # timezone-aware replacement for the deprecated datetime.utcnow();
        # .replace() keeps the original "...Z" suffix format.
        ts=datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
        version="0.1.0",
        components=components,
    )