fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
16
geo-service/services/__init__.py
Normal file
16
geo-service/services/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
GeoEdu Service - Business Logic Services
|
||||
"""
|
||||
from .tile_server import TileServerService
|
||||
from .dem_service import DEMService
|
||||
from .aoi_packager import AOIPackagerService
|
||||
from .osm_extractor import OSMExtractorService
|
||||
from .learning_generator import LearningGeneratorService
|
||||
|
||||
__all__ = [
|
||||
"TileServerService",
|
||||
"DEMService",
|
||||
"AOIPackagerService",
|
||||
"OSMExtractorService",
|
||||
"LearningGeneratorService",
|
||||
]
|
||||
420
geo-service/services/aoi_packager.py
Normal file
420
geo-service/services/aoi_packager.py
Normal file
@@ -0,0 +1,420 @@
|
||||
"""
|
||||
AOI Packager Service
|
||||
Creates Unity-compatible bundles from geographic areas
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import zipfile
|
||||
import uuid
|
||||
from typing import Optional, Tuple
|
||||
from datetime import datetime
|
||||
import math
|
||||
import structlog
|
||||
from shapely.geometry import shape, Polygon, mapping
|
||||
from shapely.ops import transform
|
||||
import pyproj
|
||||
|
||||
from config import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
# Germany bounding box as a closed WGS84 (lon, lat) ring; used by
# AOIPackagerService.is_within_germany() to reject out-of-area polygons.
GERMANY_BOUNDS = Polygon([
    (5.87, 47.27),
    (15.04, 47.27),
    (15.04, 55.06),
    (5.87, 55.06),
    (5.87, 47.27),
])

# AOI status storage keyed by aoi_id (in production, use database).
# Shared module-level state: also read by other services via import.
_aoi_storage = {}
|
||||
|
||||
|
||||
class AOIPackagerService:
    """
    Service for packaging geographic areas (AOIs) for Unity 3D rendering.

    Each bundle is a directory under ``settings.bundle_dir`` containing:
    - Terrain heightmap metadata (``terrain.json``)
    - OSM features (``osm_features.json``)
    - Suggested learning node positions (``learning_positions.json``)
    - License attribution (``attribution.json``)
    - A manifest (``manifest.json``) and a ZIP of everything (``bundle.zip``)

    Processing status lives in the module-level ``_aoi_storage`` dict
    (in production, use a database).
    """

    def __init__(self):
        # Root directory for per-AOI bundle folders.
        self.bundle_dir = settings.bundle_dir
        # Maximum accepted AOI size in km² (enforced by API callers).
        self.max_area_km2 = settings.max_aoi_size_km2

    def calculate_area_km2(self, geojson: dict) -> float:
        """
        Calculate the area of a GeoJSON polygon in square kilometers.

        Transforms WGS84 coordinates into EPSG:3035 (ETRS89-LAEA), an
        equal-area projection for Europe, so the planar area is accurate.

        Raises:
            ValueError: if the geometry is invalid or cannot be projected.
        """
        try:
            geom = shape(geojson)

            # Transform to equal-area projection (EPSG:3035 for Europe)
            project = pyproj.Transformer.from_crs(
                "EPSG:4326",  # WGS84
                "EPSG:3035",  # ETRS89-LAEA
                always_xy=True,
            ).transform

            geom_projected = transform(project, geom)
            area_m2 = geom_projected.area
            return area_m2 / 1_000_000

        except Exception as e:
            logger.error("Error calculating area", error=str(e))
            raise ValueError(f"Invalid polygon geometry: {str(e)}")

    def is_within_germany(self, geojson: dict) -> bool:
        """Check if a polygon lies entirely within Germany's bounding box."""
        try:
            geom = shape(geojson)
            return GERMANY_BOUNDS.contains(geom)
        except Exception:
            # Unparseable geometry counts as "not within Germany".
            return False

    def validate_polygon(self, geojson: dict) -> Tuple[bool, str]:
        """
        Validate a GeoJSON polygon.

        Checks:
        - GeoJSON type is "Polygon" with a coordinates list
        - geometry parses and is valid (not self-intersecting)
        - the outer ring is closed

        Returns:
            (is_valid, message) — message explains the failure, or "Valid".
        """
        try:
            if geojson.get("type") != "Polygon":
                return False, "Geometry must be a Polygon"

            coords = geojson.get("coordinates")
            if not coords or not isinstance(coords, list):
                return False, "Missing or invalid coordinates"

            geom = shape(geojson)

            if not geom.is_valid:
                return False, "Invalid polygon geometry (possibly self-intersecting)"

            # GeoJSON requires the first and last ring positions to match.
            outer_ring = coords[0]
            if outer_ring[0] != outer_ring[-1]:
                return False, "Polygon ring must be closed"

            return True, "Valid"

        except Exception as e:
            return False, f"Error validating polygon: {str(e)}"

    def estimate_bundle_size_mb(self, area_km2: float, quality: str) -> float:
        """Estimate the bundle size (MB) from area and quality preset."""
        # Empirical base size per km² in MB; unknown quality -> "medium".
        base_sizes = {
            "low": 10,
            "medium": 25,
            "high": 50,
        }

        base = base_sizes.get(quality, 25)
        return round(area_km2 * base, 1)

    async def process_aoi(
        self,
        aoi_id: str,
        polygon: dict,
        theme: str,
        quality: str,
    ):
        """
        Process an AOI and create the Unity bundle (runs as a background
        task).

        Any failure — including an invalid polygon during the area
        calculation — is recorded as status "failed" in ``_aoi_storage``
        instead of escaping the task.
        """
        logger.info("Processing AOI", aoi_id=aoi_id, theme=theme, quality=quality)

        # Record the entry BEFORE any fallible work so the except-branch
        # below can always update it. (Previously calculate_area_km2 ran
        # while building this dict and could raise before the entry
        # existed, leaving no status and an unhandled task exception.)
        _aoi_storage[aoi_id] = {
            "status": "processing",
            "polygon": polygon,
            "theme": theme,
            "quality": quality,
            "created_at": datetime.utcnow().isoformat(),
        }

        try:
            _aoi_storage[aoi_id]["area_km2"] = self.calculate_area_km2(polygon)

            # Create bundle directory
            bundle_path = os.path.join(self.bundle_dir, aoi_id)
            os.makedirs(bundle_path, exist_ok=True)

            # Pipeline: terrain -> OSM features -> learning nodes ->
            # attribution -> manifest -> ZIP.
            await self._generate_terrain(aoi_id, polygon, quality)
            await self._extract_osm_features(aoi_id, polygon)
            await self._generate_learning_positions(aoi_id, polygon, theme)
            await self._create_attribution(aoi_id)
            await self._create_manifest(aoi_id, polygon, theme, quality)
            await self._create_zip_bundle(aoi_id)

            _aoi_storage[aoi_id]["status"] = "completed"
            _aoi_storage[aoi_id]["completed_at"] = datetime.utcnow().isoformat()

            logger.info("AOI processing complete", aoi_id=aoi_id)

        except Exception as e:
            logger.error("AOI processing failed", aoi_id=aoi_id, error=str(e))
            _aoi_storage[aoi_id]["status"] = "failed"
            _aoi_storage[aoi_id]["error"] = str(e)

    async def _generate_terrain(self, aoi_id: str, polygon: dict, quality: str):
        """
        Write terrain metadata (``terrain.json``) for the AOI.

        NOTE(review): the referenced heightmap PNG is not generated here;
        ``terrain.json`` points at "terrain.heightmap.png" which must be
        produced by a later pipeline step. Removed dead code that
        instantiated DEMService and imported tile_to_bounds without using
        either.
        """
        bundle_path = os.path.join(self.bundle_dir, aoi_id)

        # Bounding box of the AOI polygon: (minx, miny, maxx, maxy).
        bounds = shape(polygon).bounds

        # Heightmap resolution scales with the requested quality preset.
        resolutions = {"low": 64, "medium": 256, "high": 512}
        resolution = resolutions.get(quality, 256)

        terrain_info = {
            "bounds": {
                "west": bounds[0],
                "south": bounds[1],
                "east": bounds[2],
                "north": bounds[3],
            },
            "resolution": resolution,
            "heightmap_file": "terrain.heightmap.png",
            "encoding": "terrain-rgb",
        }

        with open(os.path.join(bundle_path, "terrain.json"), "w") as f:
            json.dump(terrain_info, f, indent=2)

        logger.debug("Terrain generated", aoi_id=aoi_id, resolution=resolution)

    async def _extract_osm_features(self, aoi_id: str, polygon: dict):
        """Extract OSM features within the AOI into ``osm_features.json``."""
        from services.osm_extractor import OSMExtractorService

        extractor = OSMExtractorService()
        bundle_path = os.path.join(self.bundle_dir, aoi_id)

        features = await extractor.extract_features(polygon)

        features_path = os.path.join(bundle_path, "osm_features.json")
        with open(features_path, "w") as f:
            json.dump(features, f, indent=2)

        logger.debug("OSM features extracted", aoi_id=aoi_id, count=len(features.get("features", [])))

    async def _generate_learning_positions(self, aoi_id: str, polygon: dict, theme: str):
        """Generate suggested learning-node positions inside the AOI."""
        from shapely.geometry import Point  # hoisted out of the inner loop

        geom = shape(polygon)
        bounds = geom.bounds

        # Candidate positions at the centres of a 3x3 grid over the
        # bounding box; keep only those actually inside the polygon.
        positions = []
        for i in range(3):
            for j in range(3):
                lon = bounds[0] + (bounds[2] - bounds[0]) * (i + 0.5) / 3
                lat = bounds[1] + (bounds[3] - bounds[1]) * (j + 0.5) / 3

                if geom.contains(Point(lon, lat)):
                    positions.append({
                        "id": str(uuid.uuid4()),
                        "position": {"longitude": lon, "latitude": lat},
                        "suggested_theme": theme,
                        "status": "pending",
                    })

        bundle_path = os.path.join(self.bundle_dir, aoi_id)
        positions_path = os.path.join(bundle_path, "learning_positions.json")

        with open(positions_path, "w") as f:
            json.dump({"positions": positions}, f, indent=2)

        logger.debug("Learning positions generated", aoi_id=aoi_id, count=len(positions))

    async def _create_attribution(self, aoi_id: str):
        """Create ``attribution.json`` with required license notices."""
        attribution = {
            "sources": [
                {
                    "name": "OpenStreetMap",
                    "license": "Open Database License (ODbL) v1.0",
                    "url": "https://www.openstreetmap.org/copyright",
                    "attribution": "© OpenStreetMap contributors",
                    "required": True,
                },
                {
                    "name": "Copernicus DEM",
                    "license": "Copernicus Data License",
                    "url": "https://spacedata.copernicus.eu/",
                    "attribution": "© Copernicus Service Information 2024",
                    "required": True,
                },
            ],
            "generated_at": datetime.utcnow().isoformat(),
            "notice": "This data must be attributed according to the licenses above when used publicly.",
        }

        bundle_path = os.path.join(self.bundle_dir, aoi_id)
        attribution_path = os.path.join(bundle_path, "attribution.json")

        with open(attribution_path, "w") as f:
            json.dump(attribution, f, indent=2)

    async def _create_manifest(self, aoi_id: str, polygon: dict, theme: str, quality: str):
        """Create the Unity bundle manifest (``manifest.json``)."""
        geom = shape(polygon)
        bounds = geom.bounds
        centroid = geom.centroid

        manifest = {
            "version": "1.0.0",
            "aoi_id": aoi_id,
            "created_at": datetime.utcnow().isoformat(),
            "bounds": {
                "west": bounds[0],
                "south": bounds[1],
                "east": bounds[2],
                "north": bounds[3],
            },
            "center": {
                "longitude": centroid.x,
                "latitude": centroid.y,
            },
            "area_km2": self.calculate_area_km2(polygon),
            "theme": theme,
            "quality": quality,
            "assets": {
                "terrain": {
                    "file": "terrain.heightmap.png",
                    "config": "terrain.json",
                },
                "osm_features": {
                    "file": "osm_features.json",
                },
                "learning_positions": {
                    "file": "learning_positions.json",
                },
                "attribution": {
                    "file": "attribution.json",
                },
            },
            "unity": {
                "coordinate_system": "Unity (Y-up, left-handed)",
                "scale": 1.0,  # 1 Unity unit = 1 meter
                # .get() keeps this consistent with _generate_terrain's
                # default instead of raising KeyError on unknown quality.
                "terrain_resolution": {"low": 64, "medium": 256, "high": 512}.get(quality, 256),
            },
        }

        bundle_path = os.path.join(self.bundle_dir, aoi_id)
        manifest_path = os.path.join(bundle_path, "manifest.json")

        with open(manifest_path, "w") as f:
            json.dump(manifest, f, indent=2)

    async def _create_zip_bundle(self, aoi_id: str):
        """Create ``bundle.zip`` from every file in the bundle directory."""
        bundle_path = os.path.join(self.bundle_dir, aoi_id)
        zip_path = os.path.join(bundle_path, "bundle.zip")

        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
            for filename in os.listdir(bundle_path):
                # Never include the (in-progress) archive in itself.
                if filename != "bundle.zip":
                    zf.write(os.path.join(bundle_path, filename), filename)

        logger.debug("Bundle ZIP created", aoi_id=aoi_id, path=zip_path)

    async def get_aoi_status(self, aoi_id: str) -> Optional[dict]:
        """Return the stored status dict for an AOI, or None if unknown."""
        return _aoi_storage.get(aoi_id)

    async def get_manifest(self, aoi_id: str) -> Optional[dict]:
        """Return the manifest for a completed AOI, or None."""
        aoi_data = _aoi_storage.get(aoi_id)
        if aoi_data is None or aoi_data.get("status") != "completed":
            return None

        manifest_path = os.path.join(self.bundle_dir, aoi_id, "manifest.json")
        if not os.path.exists(manifest_path):
            return None

        with open(manifest_path) as f:
            return json.load(f)

    async def get_bundle_path(self, aoi_id: str) -> Optional[str]:
        """Return the path to a completed bundle ZIP, or None."""
        aoi_data = _aoi_storage.get(aoi_id)
        if aoi_data is None or aoi_data.get("status") != "completed":
            return None

        zip_path = os.path.join(self.bundle_dir, aoi_id, "bundle.zip")
        if not os.path.exists(zip_path):
            return None

        return zip_path

    async def delete_aoi(self, aoi_id: str) -> bool:
        """Delete an AOI's files and status entry; True if it existed."""
        if aoi_id not in _aoi_storage:
            return False

        import shutil

        bundle_path = os.path.join(self.bundle_dir, aoi_id)
        if os.path.exists(bundle_path):
            shutil.rmtree(bundle_path)

        del _aoi_storage[aoi_id]
        return True

    async def generate_preview(self, aoi_id: str, width: int, height: int) -> Optional[bytes]:
        """Generate a preview image of the AOI (stub).

        Would combine terrain and OSM features; currently always None.
        """
        return None
|
||||
338
geo-service/services/dem_service.py
Normal file
338
geo-service/services/dem_service.py
Normal file
@@ -0,0 +1,338 @@
|
||||
"""
|
||||
DEM (Digital Elevation Model) Service
|
||||
Serves terrain data from Copernicus DEM GLO-30
|
||||
"""
|
||||
import os
|
||||
import math
|
||||
from typing import Optional, Tuple
|
||||
import struct
|
||||
import structlog
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
from io import BytesIO
|
||||
|
||||
from config import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
# Axis-aligned WGS84 bounding box of Germany; tile requests entirely
# outside this box are rejected by the DEM service.
GERMANY_BOUNDS = dict(
    west=5.87,
    south=47.27,
    east=15.04,
    north=55.06,
)
|
||||
|
||||
|
||||
def lat_lon_to_tile(lat: float, lon: float, zoom: int) -> Tuple[int, int]:
    """Convert WGS84 latitude/longitude to XYZ (slippy-map) tile indices.

    Fix: results are clamped to the valid [0, 2**zoom - 1] range — the
    original returned ``n`` (one past the last tile) for lon == 180 or a
    latitude at the Web-Mercator limit.

    Args:
        lat: Latitude in degrees.
        lon: Longitude in degrees.
        zoom: Zoom level (n = 2**zoom tiles per axis).

    Returns:
        (x, y) tile coordinates at the given zoom.
    """
    n = 2 ** zoom
    x = int((lon + 180.0) / 360.0 * n)
    lat_rad = math.radians(lat)
    # asinh(tan(lat)) is the standard Web-Mercator y-projection.
    y = int((1.0 - math.asinh(math.tan(lat_rad)) / math.pi) / 2.0 * n)

    # Clamp edge coordinates into the valid tile range.
    x = max(0, min(n - 1, x))
    y = max(0, min(n - 1, y))
    return x, y
|
||||
|
||||
|
||||
def tile_to_bounds(z: int, x: int, y: int) -> Tuple[float, float, float, float]:
    """Return the WGS84 bounding box (west, south, east, north) of an XYZ tile."""
    n = 2 ** z

    def _column_to_lon(col: int) -> float:
        # Columns map linearly onto [-180, 180).
        return col / n * 360.0 - 180.0

    def _row_to_lat(row: int) -> float:
        # Inverse Web-Mercator: gudermannian of the normalized row.
        return math.degrees(math.atan(math.sinh(math.pi * (1 - 2 * row / n))))

    # Row y's top edge is its latitude; row y+1's top edge is the bottom.
    return _column_to_lon(x), _row_to_lat(y + 1), _column_to_lon(x + 1), _row_to_lat(y)
|
||||
|
||||
|
||||
class DEMService:
    """
    Service for handling Digital Elevation Model data.

    Uses Copernicus DEM GLO-30 (30 m resolution) as the data source and
    generates terrain tiles in Mapbox Terrain-RGB format for
    MapLibre/Unity.
    """

    def __init__(self):
        self.dem_dir = settings.dem_data_dir
        self.tile_size = settings.terrain_tile_size
        # Cache of loaded DEM arrays keyed by "lat_lon".
        # NOTE(review): unbounded — acceptable for Germany-sized
        # coverage; revisit if the served area grows.
        self._dem_cache = {}

    def _get_dem_file_path(self, lat: int, lon: int) -> str:
        """
        Return the path of the local DEM GeoTIFF covering (lat, lon).

        Files follow a simplified scheme, e.g. ``N47E008.tif`` for the
        1°x1° cell at 47°N, 8°E. (Upstream Copernicus files are named
        like Copernicus_DSM_COG_10_N47_00_E008_00_DEM.tif and are
        presumably renamed on ingest — the original docstring implied the
        upstream name was used, which did not match the code.)
        """
        lat_prefix = "N" if lat >= 0 else "S"
        lon_prefix = "E" if lon >= 0 else "W"

        # Format: N47E008
        filename = f"{lat_prefix}{abs(lat):02d}{lon_prefix}{abs(lon):03d}.tif"
        return os.path.join(self.dem_dir, filename)

    def _load_dem_tile(self, lat: int, lon: int) -> Optional[np.ndarray]:
        """Load (and cache) the 1°x1° DEM array for a cell, or None."""
        cache_key = f"{lat}_{lon}"
        if cache_key in self._dem_cache:
            return self._dem_cache[cache_key]

        filepath = self._get_dem_file_path(lat, lon)

        if not os.path.exists(filepath):
            logger.debug("DEM file not found", path=filepath)
            return None

        try:
            import rasterio

            with rasterio.open(filepath) as src:
                data = src.read(1)  # Read first band
                self._dem_cache[cache_key] = data
                return data

        except ImportError:
            # rasterio is optional in development environments.
            logger.warning("rasterio not available, using fallback")
            return self._load_dem_fallback(filepath)
        except Exception as e:
            logger.error("Error loading DEM", path=filepath, error=str(e))
            return None

    def _load_dem_fallback(self, filepath: str) -> Optional[np.ndarray]:
        """Fallback DEM loader without rasterio (development): no data."""
        return None

    async def get_heightmap_tile(self, z: int, x: int, y: int) -> Optional[bytes]:
        """
        Generate a heightmap tile in Mapbox Terrain-RGB format.

        Encoding: height = -10000 + ((R * 256 * 256 + G * 256 + B) * 0.1),
        giving a -10000 m .. +1677721.6 m range at 0.1 m precision.

        Returns None for tiles entirely outside Germany, and a flat
        placeholder tile when no DEM data is available.
        """
        west, south, east, north = tile_to_bounds(z, x, y)

        # Reject tiles that do not overlap the served (Germany) bbox.
        if east < GERMANY_BOUNDS["west"] or west > GERMANY_BOUNDS["east"]:
            return None
        if north < GERMANY_BOUNDS["south"] or south > GERMANY_BOUNDS["north"]:
            return None

        elevations = await self._get_elevations_for_bounds(west, south, east, north)

        if elevations is None:
            # No DEM data available — return a sea-level placeholder.
            return self._generate_placeholder_heightmap()

        return self._encode_terrain_rgb(elevations)

    async def _get_elevations_for_bounds(
        self, west: float, south: float, east: float, north: float
    ) -> Optional[np.ndarray]:
        """Sample a tile_size x tile_size elevation grid for a bbox, or None."""
        # 1°x1° DEM cells intersecting the bounding box.
        lat_min = int(math.floor(south))
        lat_max = int(math.ceil(north))
        lon_min = int(math.floor(west))
        lon_max = int(math.ceil(east))

        dem_tiles = []
        for lat in range(lat_min, lat_max + 1):
            for lon in range(lon_min, lon_max + 1):
                tile = self._load_dem_tile(lat, lon)
                if tile is not None:
                    dem_tiles.append((lat, lon, tile))

        if not dem_tiles:
            return None

        # Nearest-neighbour sampling per output pixel.
        # NOTE(review): O(tile_size²) Python loop — fine at 256², but a
        # vectorized lookup would be much faster if this becomes hot.
        elevations = np.zeros((self.tile_size, self.tile_size), dtype=np.float32)

        for py in range(self.tile_size):
            for px in range(self.tile_size):
                lon = west + (east - west) * px / self.tile_size
                lat = north - (north - south) * py / self.tile_size

                elevation = self._sample_elevation(lat, lon, dem_tiles)
                elevations[py, px] = elevation if elevation is not None else 0

        return elevations

    def _sample_elevation(
        self, lat: float, lon: float, dem_tiles: list
    ) -> Optional[float]:
        """Nearest-neighbour elevation sample from loaded DEM cells, or None."""
        tile_lat = int(math.floor(lat))
        tile_lon = int(math.floor(lon))

        for t_lat, t_lon, data in dem_tiles:
            if t_lat == tile_lat and t_lon == tile_lon:
                # Pixel within the 1°x1° cell; row 0 is the cell's north
                # edge, hence the (t_lat + 1 - lat) flip.
                rows, cols = data.shape
                px = int((lon - tile_lon) * cols)
                py = int((t_lat + 1 - lat) * rows)

                px = max(0, min(cols - 1, px))
                py = max(0, min(rows - 1, py))

                return float(data[py, px])

        return None

    def _encode_terrain_rgb(self, elevations: np.ndarray) -> bytes:
        """
        Encode an elevation grid as a Mapbox Terrain-RGB PNG.

        Format: height = -10000 + ((R * 256 * 256 + G * 256 + B) * 0.1)
        """
        # encoded = (elevation + 10000) / 0.1
        encoded = ((elevations + 10000) / 0.1).astype(np.uint32)

        r = (encoded // (256 * 256)) % 256
        g = (encoded // 256) % 256
        b = encoded % 256

        rgb = np.stack([r, g, b], axis=-1).astype(np.uint8)
        img = Image.fromarray(rgb, mode="RGB")

        buffer = BytesIO()
        img.save(buffer, format="PNG")
        return buffer.getvalue()

    def _generate_placeholder_heightmap(self) -> bytes:
        """Return a flat sea-level Terrain-RGB tile.

        0 m encodes to (0 + 10000) / 0.1 = 100000, i.e. RGB (1, 134, 160).
        """
        img = Image.new("RGB", (self.tile_size, self.tile_size), (1, 134, 160))
        buffer = BytesIO()
        img.save(buffer, format="PNG")
        return buffer.getvalue()

    async def get_hillshade_tile(
        self, z: int, x: int, y: int, azimuth: float = 315, altitude: float = 45
    ) -> Optional[bytes]:
        """
        Generate a grayscale hillshade PNG for a tile, or None without data.

        Args:
            z, x, y: Tile coordinates
            azimuth: Light direction in degrees (0=N, 90=E, 180=S, 270=W)
            altitude: Light altitude in degrees above horizon
        """
        west, south, east, north = tile_to_bounds(z, x, y)
        elevations = await self._get_elevations_for_bounds(west, south, east, north)

        if elevations is None:
            return None

        hillshade = self._calculate_hillshade(elevations, azimuth, altitude)

        img = Image.fromarray((hillshade * 255).astype(np.uint8), mode="L")

        buffer = BytesIO()
        img.save(buffer, format="PNG")
        return buffer.getvalue()

    def _calculate_hillshade(
        self, dem: np.ndarray, azimuth: float, altitude: float
    ) -> np.ndarray:
        """Compute a hillshade array from a DEM, normalized to [0, 1]."""
        # Convert the compass azimuth into math convention (CCW from east).
        azimuth_rad = math.radians(360 - azimuth + 90)
        altitude_rad = math.radians(altitude)

        dy, dx = np.gradient(dem)

        slope = np.arctan(np.sqrt(dx**2 + dy**2))
        aspect = np.arctan2(-dy, dx)

        hillshade = (
            np.sin(altitude_rad) * np.cos(slope)
            + np.cos(altitude_rad) * np.sin(slope) * np.cos(azimuth_rad - aspect)
        )

        return np.clip(hillshade, 0, 1)

    async def get_contour_tile(
        self, z: int, x: int, y: int, interval: int = 20
    ) -> Optional[bytes]:
        """Generate contour lines as a vector tile (stub; always None).

        Would require deriving contours from the DEM and encoding as MVT.
        """
        logger.warning("Contour tiles not yet implemented")
        return None

    async def get_elevation(self, lat: float, lon: float) -> Optional[float]:
        """Return the elevation at a point, or None without DEM coverage."""
        tile_lat = int(math.floor(lat))
        tile_lon = int(math.floor(lon))

        dem_data = self._load_dem_tile(tile_lat, tile_lon)
        if dem_data is None:
            return None

        return self._sample_elevation(lat, lon, [(tile_lat, tile_lon, dem_data)])

    async def get_elevation_profile(
        self, coordinates: list[list[float]], samples: int = 100
    ) -> list[dict]:
        """
        Sample elevations evenly along a path.

        Args:
            coordinates: Path vertices as [lon, lat] pairs.
            samples: Number of sample points. Fix: values <= 1 no longer
                raise ZeroDivisionError. (Also removed the unused
                ``substring`` import.)
        """
        from shapely.geometry import LineString

        line = LineString(coordinates)
        total_length = line.length

        # Guard the divisor so samples <= 1 cannot divide by zero.
        step = max(samples - 1, 1)

        profile = []
        for i in range(samples):
            fraction = i / step
            point = line.interpolate(fraction, normalized=True)

            elevation = await self.get_elevation(point.y, point.x)

            profile.append({
                "distance_m": fraction * total_length * 111320,  # Approximate meters
                "longitude": point.x,
                "latitude": point.y,
                "elevation_m": elevation,
            })

        return profile

    async def get_metadata(self) -> dict:
        """Report availability of local DEM source files."""
        dem_files = []
        if os.path.exists(self.dem_dir):
            dem_files = [f for f in os.listdir(self.dem_dir) if f.endswith(".tif")]

        return {
            "data_available": len(dem_files) > 0,
            # NOTE(review): counts source .tif files, not generated tiles.
            "tiles_generated": len(dem_files),
            "resolution_m": 30,
            "source": "Copernicus DEM GLO-30",
        }
|
||||
355
geo-service/services/learning_generator.py
Normal file
355
geo-service/services/learning_generator.py
Normal file
@@ -0,0 +1,355 @@
|
||||
"""
|
||||
Learning Generator Service
|
||||
Generates educational content for geographic areas using LLM
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import uuid
|
||||
from typing import Optional
|
||||
import structlog
|
||||
import httpx
|
||||
|
||||
from config import settings
|
||||
from models.learning_node import LearningNode, LearningTheme, NodeType
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
# In-memory storage for learning nodes, keyed by aoi_id -> list of
# generated LearningNode objects (use database in production).
_learning_nodes = {}
|
||||
|
||||
|
||||
class LearningGeneratorService:
|
||||
"""
|
||||
Service for generating educational learning nodes using Ollama LLM.
|
||||
|
||||
Generates themed educational content based on geographic features
|
||||
and didactic principles.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.ollama_url = settings.ollama_base_url
|
||||
self.model = settings.ollama_model
|
||||
self.timeout = settings.ollama_timeout
|
||||
|
||||
async def generate_nodes(
|
||||
self,
|
||||
aoi_id: str,
|
||||
theme: LearningTheme,
|
||||
difficulty: str,
|
||||
node_count: int,
|
||||
grade_level: Optional[str] = None,
|
||||
language: str = "de",
|
||||
) -> list[LearningNode]:
|
||||
"""
|
||||
Generate learning nodes for an AOI.
|
||||
|
||||
Uses the Ollama LLM to create educational content appropriate
|
||||
for the theme, difficulty, and grade level.
|
||||
"""
|
||||
# Get AOI information
|
||||
aoi_info = await self._get_aoi_info(aoi_id)
|
||||
if aoi_info is None:
|
||||
raise FileNotFoundError(f"AOI {aoi_id} not found")
|
||||
|
||||
# Build prompt for LLM
|
||||
prompt = self._build_generation_prompt(
|
||||
aoi_info=aoi_info,
|
||||
theme=theme,
|
||||
difficulty=difficulty,
|
||||
node_count=node_count,
|
||||
grade_level=grade_level,
|
||||
language=language,
|
||||
)
|
||||
|
||||
# Call Ollama
|
||||
try:
|
||||
response = await self._call_ollama(prompt)
|
||||
nodes = self._parse_llm_response(response, aoi_id, theme)
|
||||
except ConnectionError:
|
||||
logger.warning("Ollama not available, using mock data")
|
||||
nodes = self._generate_mock_nodes(aoi_id, theme, difficulty, node_count)
|
||||
|
||||
# Store nodes
|
||||
if aoi_id not in _learning_nodes:
|
||||
_learning_nodes[aoi_id] = []
|
||||
_learning_nodes[aoi_id].extend(nodes)
|
||||
|
||||
return nodes
|
||||
|
||||
async def _get_aoi_info(self, aoi_id: str) -> Optional[dict]:
|
||||
"""Get information about an AOI from its manifest."""
|
||||
manifest_path = os.path.join(settings.bundle_dir, aoi_id, "manifest.json")
|
||||
|
||||
if os.path.exists(manifest_path):
|
||||
with open(manifest_path) as f:
|
||||
return json.load(f)
|
||||
|
||||
# Check in-memory storage
|
||||
from services.aoi_packager import _aoi_storage
|
||||
return _aoi_storage.get(aoi_id)
|
||||
|
||||
    def _build_generation_prompt(
        self,
        aoi_info: dict,
        theme: LearningTheme,
        difficulty: str,
        node_count: int,
        grade_level: Optional[str],
        language: str,
    ) -> str:
        """Build a prompt for the LLM to generate learning nodes.

        The prompt is written in German (the teaching context) regardless of
        *language*; *language* only controls which output language the LLM is
        instructed to answer in. The expected answer format is a bare JSON
        array matching what `_parse_llm_response` consumes.
        """
        # German one-line description per theme, interpolated into the prompt.
        theme_descriptions = {
            LearningTheme.TOPOGRAPHIE: "Landschaftsformen, Höhen und Geländemerkmale",
            LearningTheme.LANDNUTZUNG: "Siedlungen, Landwirtschaft und Flächennutzung",
            LearningTheme.ORIENTIERUNG: "Kartenlesen, Kompass und Navigation",
            LearningTheme.GEOLOGIE: "Gesteinsarten und geologische Formationen",
            LearningTheme.HYDROLOGIE: "Gewässer, Einzugsgebiete und Wasserkreislauf",
            LearningTheme.VEGETATION: "Pflanzengemeinschaften und Klimazonen",
        }

        # Didactic meaning of each difficulty label.
        difficulty_descriptions = {
            "leicht": "Grundlegende Beobachtungen und einfache Fakten",
            "mittel": "Verknüpfung von Zusammenhängen und Vergleiche",
            "schwer": "Analyse, Transfer und kritisches Denken",
        }

        # Geographic context from the AOI manifest; .get defaults of 0 keep
        # the prompt well-formed even for incomplete manifests.
        bounds = aoi_info.get("bounds", {})
        center = aoi_info.get("center", {})

        prompt = f"""Du bist ein Erdkunde-Didaktiker und erstellst Lernstationen für eine interaktive 3D-Lernwelt.

GEBIET:
- Zentrum: {center.get('latitude', 0):.4f}°N, {center.get('longitude', 0):.4f}°E
- Fläche: ca. {aoi_info.get('area_km2', 0):.2f} km²
- Grenzen: West {bounds.get('west', 0):.4f}°, Süd {bounds.get('south', 0):.4f}°, Ost {bounds.get('east', 0):.4f}°, Nord {bounds.get('north', 0):.4f}°

THEMA: {theme.value} - {theme_descriptions.get(theme, '')}

SCHWIERIGKEITSGRAD: {difficulty} - {difficulty_descriptions.get(difficulty, '')}

ZIELGRUPPE: {grade_level if grade_level else 'Allgemein (Klasse 5-10)'}

AUFGABE:
Erstelle {node_count} Lernstationen im JSON-Format. Jede Station soll:
1. Eine geografische Position innerhalb des Gebiets haben
2. Eine Lernfrage oder Aufgabe enthalten
3. Hinweise zur Lösung bieten
4. Die richtige Antwort mit Erklärung enthalten

FORMAT (JSON-Array):
[
  {{
    "title": "Titel der Station",
    "position": {{"latitude": 0.0, "longitude": 0.0}},
    "question": "Die Lernfrage",
    "hints": ["Hinweis 1", "Hinweis 2"],
    "answer": "Die Antwort",
    "explanation": "Didaktische Erklärung",
    "node_type": "question|observation|exploration",
    "points": 10
  }}
]

WICHTIG:
- Positionen müssen innerhalb der Gebietsgrenzen liegen
- Fragen sollen zum Thema {theme.value} passen
- Sprache: {"Deutsch" if language == "de" else "English"}
- Altersgerechte Formulierungen verwenden

Antworte NUR mit dem JSON-Array, ohne weitere Erklärungen."""

        return prompt
|
||||
|
||||
async def _call_ollama(self, prompt: str) -> str:
|
||||
"""Call Ollama API to generate content."""
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=self.timeout) as client:
|
||||
response = await client.post(
|
||||
f"{self.ollama_url}/api/generate",
|
||||
json={
|
||||
"model": self.model,
|
||||
"prompt": prompt,
|
||||
"stream": False,
|
||||
"options": {
|
||||
"temperature": 0.7,
|
||||
"top_p": 0.9,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise ConnectionError(f"Ollama returned {response.status_code}")
|
||||
|
||||
result = response.json()
|
||||
return result.get("response", "")
|
||||
|
||||
except httpx.ConnectError:
|
||||
raise ConnectionError("Cannot connect to Ollama")
|
||||
except Exception as e:
|
||||
logger.error("Ollama API error", error=str(e))
|
||||
raise ConnectionError(f"Ollama error: {str(e)}")
|
||||
|
||||
def _parse_llm_response(
|
||||
self, response: str, aoi_id: str, theme: LearningTheme
|
||||
) -> list[LearningNode]:
|
||||
"""Parse LLM response into LearningNode objects."""
|
||||
try:
|
||||
# Find JSON array in response
|
||||
start = response.find("[")
|
||||
end = response.rfind("]") + 1
|
||||
|
||||
if start == -1 or end == 0:
|
||||
raise ValueError("No JSON array found in response")
|
||||
|
||||
json_str = response[start:end]
|
||||
data = json.loads(json_str)
|
||||
|
||||
nodes = []
|
||||
for item in data:
|
||||
node = LearningNode(
|
||||
id=str(uuid.uuid4()),
|
||||
aoi_id=aoi_id,
|
||||
title=item.get("title", "Unbenannte Station"),
|
||||
theme=theme,
|
||||
position={
|
||||
"latitude": item.get("position", {}).get("latitude", 0),
|
||||
"longitude": item.get("position", {}).get("longitude", 0),
|
||||
},
|
||||
question=item.get("question", ""),
|
||||
hints=item.get("hints", []),
|
||||
answer=item.get("answer", ""),
|
||||
explanation=item.get("explanation", ""),
|
||||
node_type=NodeType(item.get("node_type", "question")),
|
||||
points=item.get("points", 10),
|
||||
approved=False,
|
||||
)
|
||||
nodes.append(node)
|
||||
|
||||
return nodes
|
||||
|
||||
except (json.JSONDecodeError, ValueError) as e:
|
||||
logger.error("Failed to parse LLM response", error=str(e))
|
||||
return []
|
||||
|
||||
def _generate_mock_nodes(
|
||||
self,
|
||||
aoi_id: str,
|
||||
theme: LearningTheme,
|
||||
difficulty: str,
|
||||
node_count: int,
|
||||
) -> list[LearningNode]:
|
||||
"""Generate mock learning nodes for development."""
|
||||
mock_questions = {
|
||||
LearningTheme.TOPOGRAPHIE: [
|
||||
("Höhenbestimmung", "Schätze die Höhe dieses Punktes.", "Ca. 500m über NN"),
|
||||
("Hangneigung", "Beschreibe die Steilheit des Hanges.", "Mäßig steil, ca. 15-20°"),
|
||||
("Talform", "Welche Form hat dieses Tal?", "V-förmiges Erosionstal"),
|
||||
],
|
||||
LearningTheme.LANDNUTZUNG: [
|
||||
("Gebäudetypen", "Welche Gebäude siehst du hier?", "Wohnhäuser und landwirtschaftliche Gebäude"),
|
||||
("Flächennutzung", "Wie wird das Land genutzt?", "Landwirtschaft und Siedlung"),
|
||||
("Infrastruktur", "Welche Verkehrswege erkennst du?", "Straße und Feldweg"),
|
||||
],
|
||||
LearningTheme.ORIENTIERUNG: [
|
||||
("Himmelsrichtung", "In welche Richtung fließt der Bach?", "Nach Nordwesten"),
|
||||
("Entfernung", "Wie weit ist es bis zum Waldrand?", "Etwa 200 Meter"),
|
||||
("Wegbeschreibung", "Beschreibe den Weg zum Aussichtspunkt.", "Nordöstlich, bergauf"),
|
||||
],
|
||||
}
|
||||
|
||||
questions = mock_questions.get(theme, mock_questions[LearningTheme.TOPOGRAPHIE])
|
||||
nodes = []
|
||||
|
||||
for i in range(min(node_count, len(questions))):
|
||||
title, question, answer = questions[i]
|
||||
nodes.append(LearningNode(
|
||||
id=str(uuid.uuid4()),
|
||||
aoi_id=aoi_id,
|
||||
title=title,
|
||||
theme=theme,
|
||||
position={"latitude": 47.7 + i * 0.001, "longitude": 9.19 + i * 0.001},
|
||||
question=question,
|
||||
hints=[f"Hinweis {j + 1}" for j in range(2)],
|
||||
answer=answer,
|
||||
explanation=f"Diese Aufgabe trainiert die Beobachtung von {theme.value}.",
|
||||
node_type=NodeType.QUESTION,
|
||||
points=10,
|
||||
approved=False,
|
||||
))
|
||||
|
||||
return nodes
|
||||
|
||||
async def get_nodes_for_aoi(
|
||||
self, aoi_id: str, theme: Optional[LearningTheme] = None
|
||||
) -> Optional[list[LearningNode]]:
|
||||
"""Get all learning nodes for an AOI."""
|
||||
nodes = _learning_nodes.get(aoi_id)
|
||||
|
||||
if nodes is None:
|
||||
return None
|
||||
|
||||
if theme is not None:
|
||||
nodes = [n for n in nodes if n.theme == theme]
|
||||
|
||||
return nodes
|
||||
|
||||
async def update_node(
|
||||
self, aoi_id: str, node_id: str, node_update: LearningNode
|
||||
) -> bool:
|
||||
"""Update a learning node."""
|
||||
nodes = _learning_nodes.get(aoi_id)
|
||||
if nodes is None:
|
||||
return False
|
||||
|
||||
for i, node in enumerate(nodes):
|
||||
if node.id == node_id:
|
||||
_learning_nodes[aoi_id][i] = node_update
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def delete_node(self, aoi_id: str, node_id: str) -> bool:
|
||||
"""Delete a learning node."""
|
||||
nodes = _learning_nodes.get(aoi_id)
|
||||
if nodes is None:
|
||||
return False
|
||||
|
||||
for i, node in enumerate(nodes):
|
||||
if node.id == node_id:
|
||||
del _learning_nodes[aoi_id][i]
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def approve_node(self, aoi_id: str, node_id: str) -> bool:
|
||||
"""Approve a learning node for student use."""
|
||||
nodes = _learning_nodes.get(aoi_id)
|
||||
if nodes is None:
|
||||
return False
|
||||
|
||||
for node in nodes:
|
||||
if node.id == node_id:
|
||||
node.approved = True
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
    async def get_statistics(self) -> dict:
        """Get statistics about learning node usage.

        Returns totals, per-theme counts, the average node count per AOI,
        and the most common theme ("topographie" when no nodes exist).
        """
        total = 0
        by_theme = {}
        # NOTE(review): by_difficulty is returned but never populated below —
        # nodes apparently carry no difficulty field here; confirm whether
        # this key is intentionally always empty.
        by_difficulty = {}

        for aoi_nodes in _learning_nodes.values():
            for node in aoi_nodes:
                total += 1
                theme = node.theme.value
                by_theme[theme] = by_theme.get(theme, 0) + 1

        return {
            "total_nodes": total,
            "by_theme": by_theme,
            "by_difficulty": by_difficulty,
            # Guard against division by zero when no AOIs have nodes yet.
            "avg_per_aoi": total / len(_learning_nodes) if _learning_nodes else 0,
            # Fallback theme when there are no nodes at all.
            "popular_theme": max(by_theme, key=by_theme.get) if by_theme else "topographie",
        }
|
||||
217
geo-service/services/osm_extractor.py
Normal file
217
geo-service/services/osm_extractor.py
Normal file
@@ -0,0 +1,217 @@
|
||||
"""
|
||||
OSM Extractor Service
|
||||
Extracts OpenStreetMap features from PostGIS or vector tiles
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
from typing import Optional
|
||||
import structlog
|
||||
from shapely.geometry import shape, mapping
|
||||
|
||||
from config import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class OSMExtractorService:
    """
    Service for extracting OSM features from a geographic area.

    Can extract from:
    - PostGIS database (imported OSM data)
    - PMTiles archive

    While no OSM data is imported, all public methods fall back to
    randomly generated mock features (see `_generate_mock_features`).
    """

    def __init__(self):
        # Connection string for the PostGIS database with imported OSM data.
        self.database_url = settings.database_url

    async def extract_features(self, polygon: dict) -> dict:
        """
        Extract OSM features within a polygon.

        Args:
            polygon: GeoJSON geometry dict describing the area of interest.

        Returns a GeoJSON FeatureCollection with categorized features.
        Database failures or an empty database result fall through to mock
        data, with metadata["source"] flagging the substitution.
        """
        geom = shape(polygon)
        bounds = geom.bounds  # (minx, miny, maxx, maxy)

        # Feature collection structure
        features = {
            "type": "FeatureCollection",
            "features": [],
            "metadata": {
                "source": "OpenStreetMap",
                "license": "ODbL",
                "bounds": {
                    "west": bounds[0],
                    "south": bounds[1],
                    "east": bounds[2],
                    "north": bounds[3],
                },
            },
        }

        # Try to extract from database
        try:
            db_features = await self._extract_from_database(geom)
            if db_features:
                features["features"].extend(db_features)
                return features
        except Exception as e:
            # Best effort: a broken DB must not break the endpoint — we fall
            # through to mock data below.
            logger.warning("Database extraction failed", error=str(e))

        # Fall back to mock data for development
        features["features"] = self._generate_mock_features(geom)
        features["metadata"]["source"] = "Mock Data (OSM data not imported)"

        return features

    async def _extract_from_database(self, geom) -> list:
        """Extract features from PostGIS database.

        Currently a stub: always returns [] so callers use mock data.
        """
        # This would use asyncpg to query the database
        # For now, return empty list to trigger mock data

        # Example query structure (not executed):
        # SELECT ST_AsGeoJSON(way), name, building, highway, natural, waterway
        # FROM planet_osm_polygon
        # WHERE ST_Intersects(way, ST_GeomFromGeoJSON($1))

        return []

    def _generate_mock_features(self, geom) -> list:
        """Generate mock OSM features for development.

        NOTE(review): output is nondeterministic — `random` is used without a
        seed, so repeated calls on the same geometry yield different features.
        `Point` is imported but unused.
        """
        from shapely.geometry import Point, LineString, Polygon as ShapelyPolygon
        import random

        bounds = geom.bounds
        features = []

        # Generate some mock buildings
        for i in range(5):
            x = random.uniform(bounds[0], bounds[2])
            y = random.uniform(bounds[1], bounds[3])

            # Small building polygon
            size = 0.0002  # ~20m
            building = ShapelyPolygon([
                (x, y),
                (x + size, y),
                (x + size, y + size),
                (x, y + size),
                (x, y),
            ])

            # Only keep buildings whose centroid lies inside the AOI, so a
            # building straddling the bbox edge can't land outside the polygon.
            if geom.contains(building.centroid):
                features.append({
                    "type": "Feature",
                    "geometry": mapping(building),
                    "properties": {
                        "category": "building",
                        "building": "yes",
                        "name": f"Gebäude {i + 1}",
                    },
                })

        # Generate some mock roads (straight segments between random points;
        # not clipped to the polygon, only to its bounding box)
        for i in range(3):
            x1 = random.uniform(bounds[0], bounds[2])
            y1 = random.uniform(bounds[1], bounds[3])
            x2 = random.uniform(bounds[0], bounds[2])
            y2 = random.uniform(bounds[1], bounds[3])

            road = LineString([(x1, y1), (x2, y2)])

            features.append({
                "type": "Feature",
                "geometry": mapping(road),
                "properties": {
                    "category": "road",
                    "highway": random.choice(["primary", "secondary", "residential"]),
                    "name": f"Straße {i + 1}",
                },
            })

        # Generate mock water feature: a rectangle centered on the bbox,
        # sized relative to the AOI's smaller dimension.
        cx = (bounds[0] + bounds[2]) / 2
        cy = (bounds[1] + bounds[3]) / 2
        size = min(bounds[2] - bounds[0], bounds[3] - bounds[1]) * 0.2

        water = ShapelyPolygon([
            (cx - size, cy - size / 2),
            (cx + size, cy - size / 2),
            (cx + size, cy + size / 2),
            (cx - size, cy + size / 2),
            (cx - size, cy - size / 2),
        ])

        # Clip the lake to the AOI so it never spills outside the polygon.
        if geom.intersects(water):
            features.append({
                "type": "Feature",
                "geometry": mapping(water.intersection(geom)),
                "properties": {
                    "category": "water",
                    "natural": "water",
                    "name": "See",
                },
            })

        # Generate mock forest anchored at the bbox's south-west corner,
        # clipped to the AOI like the water feature above.
        forest_size = size * 1.5
        forest = ShapelyPolygon([
            (bounds[0], bounds[1]),
            (bounds[0] + forest_size, bounds[1]),
            (bounds[0] + forest_size, bounds[1] + forest_size),
            (bounds[0], bounds[1] + forest_size),
            (bounds[0], bounds[1]),
        ])

        if geom.intersects(forest):
            features.append({
                "type": "Feature",
                "geometry": mapping(forest.intersection(geom)),
                "properties": {
                    "category": "vegetation",
                    "landuse": "forest",
                    "name": "Wald",
                },
            })

        return features

    async def get_feature_statistics(self, polygon: dict) -> dict:
        """Get statistics about features in an area.

        Returns the total feature count and a per-category histogram
        (features without a category are tallied under "other").
        """
        features = await self.extract_features(polygon)

        categories = {}
        for feature in features.get("features", []):
            category = feature.get("properties", {}).get("category", "other")
            categories[category] = categories.get(category, 0) + 1

        return {
            "total_features": len(features.get("features", [])),
            "by_category": categories,
        }

    async def search_features(
        self,
        polygon: dict,
        category: str,
        name_filter: Optional[str] = None,
    ) -> list:
        """Search for specific features within an area.

        Args:
            polygon: GeoJSON geometry to search within.
            category: required feature category (e.g. "building", "road").
            name_filter: optional case-insensitive substring match on the
                feature's "name" property.

        Returns:
            Matching GeoJSON Feature dicts.
        """
        all_features = await self.extract_features(polygon)

        filtered = []
        for feature in all_features.get("features", []):
            props = feature.get("properties", {})

            if props.get("category") != category:
                continue

            if name_filter:
                name = props.get("name", "")
                if name_filter.lower() not in name.lower():
                    continue

            filtered.append(feature)

        return filtered
|
||||
186
geo-service/services/tile_server.py
Normal file
186
geo-service/services/tile_server.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""
|
||||
Tile Server Service
|
||||
Serves vector tiles from PMTiles format or generates on-demand from PostGIS
|
||||
"""
|
||||
import os
|
||||
import gzip
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
import structlog
|
||||
from pmtiles.reader import Reader as PMTilesReader
|
||||
from pmtiles.tile import TileType
|
||||
|
||||
from config import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class MMapFileReader:
    """File-backed byte source exposing (offset, length) reads for PMTiles.

    Used as a context manager around the archive file. NOTE: despite the
    name, this uses ordinary seek/read rather than mmap.
    """

    def __init__(self, path: str):
        # Handle and cached size are populated on __enter__.
        self.path = path
        self._file = None
        self._size = 0

    def __enter__(self):
        handle = open(self.path, "rb")
        # Measure the file once by seeking to the end, then rewind.
        handle.seek(0, 2)
        self._size = handle.tell()
        handle.seek(0)
        self._file = handle
        return self

    def __exit__(self, *args):
        if self._file is not None:
            self._file.close()

    def read(self, offset: int, length: int) -> bytes:
        """Return up to *length* bytes starting at *offset*."""
        self._file.seek(offset)
        return self._file.read(length)

    def size(self) -> int:
        """Return the total file size in bytes (cached at __enter__)."""
        return self._size
|
||||
|
||||
|
||||
class TileServerService:
    """
    Service for serving vector tiles from PMTiles format.

    PMTiles is a cloud-optimized format for tile archives that allows
    random access to individual tiles without extracting the entire archive.

    Tiles read from the archive are cached on disk under
    <tile_cache_dir>/<z>/<x>/<y>.pbf and served from there on later requests.
    """

    def __init__(self):
        # Path to the .pmtiles archive and the on-disk tile cache root.
        self.pmtiles_path = settings.pmtiles_path
        self.cache_dir = settings.tile_cache_dir
        # Lazily created PMTiles reader and cached metadata dict.
        self._reader = None
        self._metadata_cache = None

    def _get_reader(self) -> Optional[PMTilesReader]:
        """Get or create PMTiles reader.

        Returns None (after logging) when the archive file is missing or
        the reader cannot be initialized.
        """
        if not os.path.exists(self.pmtiles_path):
            logger.warning("PMTiles file not found", path=self.pmtiles_path)
            return None

        if self._reader is None:
            try:
                file_reader = MMapFileReader(self.pmtiles_path)
                # Opened for the service's lifetime; there is no matching
                # __exit__, so the handle is released only at process exit.
                file_reader.__enter__()
                # NOTE(review): pmtiles.reader.Reader expects a
                # get_bytes(offset, length) callable (e.g. MmapSource, or
                # file_reader.read); passing the reader object itself looks
                # wrong — confirm against the installed pmtiles version.
                self._reader = PMTilesReader(file_reader)
                logger.info("PMTiles reader initialized", path=self.pmtiles_path)
            except Exception as e:
                logger.error("Failed to initialize PMTiles reader", error=str(e))
                return None

        return self._reader

    async def get_tile(self, z: int, x: int, y: int) -> Optional[bytes]:
        """
        Get a vector tile at the specified coordinates.

        Args:
            z: Zoom level
            x: Tile X coordinate
            y: Tile Y coordinate

        Returns:
            Tile data as gzipped protobuf, or None if not found

        Raises:
            FileNotFoundError: if the PMTiles archive is not available.
        """
        # Check cache first
        cache_path = os.path.join(self.cache_dir, str(z), str(x), f"{y}.pbf")
        if os.path.exists(cache_path):
            with open(cache_path, "rb") as f:
                return f.read()

        # Try to get from PMTiles
        reader = self._get_reader()
        if reader is None:
            raise FileNotFoundError("PMTiles file not available")

        try:
            # NOTE(review): confirm the method name against the installed
            # pmtiles package — upstream Reader exposes get(z, x, y).
            tile_data = reader.get_tile(z, x, y)

            if tile_data is None:
                return None

            # Cache the tile
            await self._cache_tile(z, x, y, tile_data)

            return tile_data

        except Exception as e:
            # Read errors are logged and reported to callers as "not found".
            logger.error("Error reading tile", z=z, x=x, y=y, error=str(e))
            return None

    async def _cache_tile(self, z: int, x: int, y: int, data: bytes):
        """Cache a tile to disk under <cache_dir>/<z>/<x>/<y>.pbf."""
        cache_path = os.path.join(self.cache_dir, str(z), str(x))
        os.makedirs(cache_path, exist_ok=True)

        tile_path = os.path.join(cache_path, f"{y}.pbf")
        with open(tile_path, "wb") as f:
            f.write(data)

    async def get_metadata(self) -> dict:
        """
        Get metadata about the tile archive.

        Returns:
            Dictionary with metadata including bounds, zoom levels, etc.
            When no archive is present, a static Germany-wide default with
            data_available=False is returned (and not cached, so a later
            archive drop-in takes effect).
        """
        if self._metadata_cache is not None:
            return self._metadata_cache

        reader = self._get_reader()
        if reader is None:
            return {
                "data_available": False,
                "minzoom": 0,
                "maxzoom": 14,
                # Germany-wide extent [west, south, east, north].
                "bounds": [5.87, 47.27, 15.04, 55.06],
                "center": [10.45, 51.16, 6],
            }

        try:
            header = reader.header()
            metadata = reader.metadata()

            self._metadata_cache = {
                "data_available": True,
                # NOTE(review): verify key casing — pmtiles header keys may
                # not be camelCase "minZoom"/"maxZoom"; the defaults here
                # would silently mask a mismatch.
                "minzoom": header.get("minZoom", 0),
                "maxzoom": header.get("maxZoom", 14),
                "bounds": header.get("bounds", [5.87, 47.27, 15.04, 55.06]),
                "center": header.get("center", [10.45, 51.16, 6]),
                "tile_type": "mvt",  # Mapbox Vector Tiles
                "last_updated": datetime.fromtimestamp(
                    os.path.getmtime(self.pmtiles_path)
                ).isoformat() if os.path.exists(self.pmtiles_path) else None,
                **metadata,
            }

            return self._metadata_cache

        except Exception as e:
            logger.error("Error reading metadata", error=str(e))
            return {"data_available": False}

    def clear_cache(self):
        """Clear the tile cache (removes and recreates the cache directory)."""
        import shutil

        if os.path.exists(self.cache_dir):
            shutil.rmtree(self.cache_dir)
        os.makedirs(self.cache_dir)
        logger.info("Tile cache cleared")

    def get_cache_size_mb(self) -> float:
        """Get the current cache size in MB."""
        total_size = 0
        # Walk the whole cache tree and sum individual file sizes.
        for dirpath, dirnames, filenames in os.walk(self.cache_dir):
            for filename in filenames:
                filepath = os.path.join(dirpath, filename)
                total_size += os.path.getsize(filepath)

        return total_size / (1024 * 1024)
|
||||
Reference in New Issue
Block a user