fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
geo-service/main.py — new file, 192 lines
@@ -0,0 +1,192 @@
"""
|
||||
GeoEdu Service - Self-Hosted OSM + Terrain Learning Platform
|
||||
DSGVO-konforme Erdkunde-Lernplattform mit selbst gehostetem OpenStreetMap
|
||||
|
||||
Main FastAPI Application
|
||||
"""
|
||||
import structlog
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse
|
||||
import time
|
||||
|
||||
from config import settings
|
||||
|
||||
# Configure structured logging
|
||||
structlog.configure(
|
||||
processors=[
|
||||
structlog.stdlib.filter_by_level,
|
||||
structlog.stdlib.add_logger_name,
|
||||
structlog.stdlib.add_log_level,
|
||||
structlog.stdlib.PositionalArgumentsFormatter(),
|
||||
structlog.processors.TimeStamper(fmt="iso"),
|
||||
structlog.processors.StackInfoRenderer(),
|
||||
structlog.processors.format_exc_info,
|
||||
structlog.processors.UnicodeDecoder(),
|
||||
structlog.processors.JSONRenderer() if not settings.is_development else structlog.dev.ConsoleRenderer(),
|
||||
],
|
||||
wrapper_class=structlog.stdlib.BoundLogger,
|
||||
context_class=dict,
|
||||
logger_factory=structlog.stdlib.LoggerFactory(),
|
||||
cache_logger_on_first_use=True,
|
||||
)
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application startup and shutdown.

    On startup, logs the service configuration and checks every data
    directory, creating any that are missing. On shutdown, emits a
    final log line.
    """
    import os

    logger.info(
        "Starting GeoEdu Service",
        environment=settings.environment,
        port=settings.port,
    )

    # Required data directories: (human-readable label, path).
    required_dirs = (
        ("OSM Data", settings.osm_data_dir),
        ("DEM Data", settings.dem_data_dir),
        ("Tile Cache", settings.tile_cache_dir),
        ("Bundles", settings.bundle_dir),
    )
    for label, path in required_dirs:
        if not os.path.exists(path):
            logger.warning(f"{label} directory missing", path=path)
            os.makedirs(path, exist_ok=True)
        else:
            logger.info(f"{label} directory exists", path=path)

    yield  # application serves requests here

    logger.info("Shutting down GeoEdu Service")
# FastAPI application instance. Interactive API docs (/docs, /redoc)
# are exposed only in development and disabled otherwise.
app = FastAPI(
    title="GeoEdu Service",
    version="1.0.0",
    description="DSGVO-konforme Erdkunde-Lernplattform mit selbst gehostetem OpenStreetMap",
    lifespan=lifespan,
    docs_url="/docs" if settings.is_development else None,
    redoc_url="/redoc" if settings.is_development else None,
)

# Allow browser clients from the configured origins, with credentials.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Request timing middleware
@app.middleware("http")
async def add_timing_header(request: Request, call_next):
    """Attach an X-Process-Time header (seconds, as a string) to every response."""
    started = time.time()
    response = await call_next(request)
    elapsed = time.time() - started
    response.headers["X-Process-Time"] = str(elapsed)
    return response
# Register the versioned API routers.
from api.tiles import router as tiles_router
from api.terrain import router as terrain_router
from api.aoi import router as aoi_router
from api.learning import router as learning_router

for _router, _prefix, _tag in (
    (tiles_router, "/api/v1/tiles", "Tiles"),
    (terrain_router, "/api/v1/terrain", "Terrain"),
    (aoi_router, "/api/v1/aoi", "AOI"),
    (learning_router, "/api/v1/learning", "Learning"),
):
    app.include_router(_router, prefix=_prefix, tags=[_tag])
# Health check endpoint
@app.get("/health", tags=["System"])
async def health_check():
    """
    Health check endpoint for Docker/Kubernetes probes.

    Returns service status and basic metrics: data-availability flags
    and key configuration values. Always reports "healthy" when the
    process is up; probes needing tile/terrain data should also
    inspect ``data_status``.
    """
    import os

    # Data availability: the PMTiles archive must exist, and the DEM
    # directory must exist and be non-empty.
    pmtiles_exists = os.path.exists(settings.pmtiles_path)
    dem_exists = os.path.exists(settings.dem_data_dir) and bool(os.listdir(settings.dem_data_dir))

    return {
        "status": "healthy",
        "service": "geo-service",
        "version": "1.0.0",
        "environment": settings.environment,
        "data_status": {
            "pmtiles_available": pmtiles_exists,
            "dem_available": dem_exists,
            "tile_cache_dir": os.path.exists(settings.tile_cache_dir),
            "bundle_dir": os.path.exists(settings.bundle_dir),
        },
        "config": {
            "max_aoi_size_km2": settings.max_aoi_size_km2,
            "supported_themes": settings.supported_themes,
        },
    }
# Root endpoint
@app.get("/", tags=["System"])
async def root():
    """Return service metadata: endpoints, docs URL, and data attribution."""
    endpoints = {
        "tiles": "/api/v1/tiles",
        "terrain": "/api/v1/terrain",
        "aoi": "/api/v1/aoi",
        "learning": "/api/v1/learning",
    }
    attribution = {
        "osm": "© OpenStreetMap contributors (ODbL)",
        "dem": "© Copernicus Service (free, attribution required)",
    }
    return {
        "service": "GeoEdu Service",
        "description": "DSGVO-konforme Erdkunde-Lernplattform",
        "version": "1.0.0",
        "docs": "/docs" if settings.is_development else "disabled",
        "endpoints": endpoints,
        "attribution": attribution,
    }
# Error handlers
@app.exception_handler(404)
async def not_found_handler(request: Request, exc):
    """Return a JSON 404 body that echoes the requested path."""
    payload = {"error": "Not found", "path": str(request.url.path)}
    return JSONResponse(status_code=404, content=payload)
@app.exception_handler(500)
async def internal_error_handler(request: Request, exc):
    """Log the failure and return a generic JSON 500 body (no details leaked)."""
    logger.error("Internal server error", path=str(request.url.path), error=str(exc))
    payload = {"error": "Internal server error"}
    return JSONResponse(status_code=500, content=payload)
if __name__ == "__main__":
    # Development entry point: run the app directly under uvicorn,
    # with auto-reload enabled only in development mode.
    import uvicorn

    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=settings.port,
        reload=settings.is_development,
    )
Reference in New Issue
Block a user