fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
217
geo-service/services/osm_extractor.py
Normal file
217
geo-service/services/osm_extractor.py
Normal file
@@ -0,0 +1,217 @@
|
||||
"""
|
||||
OSM Extractor Service
|
||||
Extracts OpenStreetMap features from PostGIS or vector tiles
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
from typing import Optional
|
||||
import structlog
|
||||
from shapely.geometry import shape, mapping
|
||||
|
||||
from config import settings
|
||||
|
||||
# Module-level structured logger bound to this module's name (structlog).
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class OSMExtractorService:
    """
    Service for extracting OSM features from a geographic area.

    Can extract from:
    - PostGIS database (imported OSM data)
    - PMTiles archive
    """

    def __init__(self):
        # PostGIS connection string, taken from the service settings.
        self.database_url = settings.database_url

    async def extract_features(self, polygon: dict) -> dict:
        """
        Extract OSM features within a polygon.

        Args:
            polygon: GeoJSON geometry dict describing the area of interest.

        Returns:
            A GeoJSON FeatureCollection dict with categorized features and a
            ``metadata`` entry carrying source, license, and bounding box.
        """
        geom = shape(polygon)
        bounds = geom.bounds  # (minx, miny, maxx, maxy)

        # Feature collection structure
        features = {
            "type": "FeatureCollection",
            "features": [],
            "metadata": {
                "source": "OpenStreetMap",
                "license": "ODbL",
                "bounds": {
                    "west": bounds[0],
                    "south": bounds[1],
                    "east": bounds[2],
                    "north": bounds[3],
                },
            },
        }

        # Try the database first; any failure falls through to mock data so
        # development environments without imported OSM data keep working.
        try:
            db_features = await self._extract_from_database(geom)
            if db_features:
                features["features"].extend(db_features)
                return features
        except Exception as e:
            logger.warning("Database extraction failed", error=str(e))

        # Fall back to mock data for development
        features["features"] = self._generate_mock_features(geom)
        features["metadata"]["source"] = "Mock Data (OSM data not imported)"

        return features

    async def _extract_from_database(self, geom) -> list:
        """Extract features from the PostGIS database.

        Currently a stub that returns an empty list, which makes
        :meth:`extract_features` fall back to mock data. A real
        implementation would query PostGIS via asyncpg.
        """
        # Example query structure (not executed):
        # SELECT ST_AsGeoJSON(way), name, building, highway, natural, waterway
        # FROM planet_osm_polygon
        # WHERE ST_Intersects(way, ST_GeomFromGeoJSON($1))
        return []

    def _generate_mock_features(self, geom) -> list:
        """Generate mock OSM features (buildings, roads, water, forest)
        for development when no OSM data has been imported."""
        from shapely.geometry import LineString, Polygon as ShapelyPolygon
        import random

        bounds = geom.bounds
        features = []

        # Generate some mock buildings
        for i in range(5):
            x = random.uniform(bounds[0], bounds[2])
            y = random.uniform(bounds[1], bounds[3])

            # Small building polygon
            size = 0.0002  # ~20m
            building = ShapelyPolygon([
                (x, y),
                (x + size, y),
                (x + size, y + size),
                (x, y + size),
                (x, y),
            ])

            # Keep only buildings whose centroid falls inside the query area.
            if geom.contains(building.centroid):
                features.append({
                    "type": "Feature",
                    "geometry": mapping(building),
                    "properties": {
                        "category": "building",
                        "building": "yes",
                        "name": f"Gebäude {i + 1}",
                    },
                })

        # Generate some mock roads
        for i in range(3):
            x1 = random.uniform(bounds[0], bounds[2])
            y1 = random.uniform(bounds[1], bounds[3])
            x2 = random.uniform(bounds[0], bounds[2])
            y2 = random.uniform(bounds[1], bounds[3])

            road = LineString([(x1, y1), (x2, y2)])

            features.append({
                "type": "Feature",
                "geometry": mapping(road),
                "properties": {
                    "category": "road",
                    "highway": random.choice(["primary", "secondary", "residential"]),
                    "name": f"Straße {i + 1}",
                },
            })

        # Generate mock water feature centered in the bounding box; size is
        # 20% of the shorter bounding-box edge.
        cx = (bounds[0] + bounds[2]) / 2
        cy = (bounds[1] + bounds[3]) / 2
        size = min(bounds[2] - bounds[0], bounds[3] - bounds[1]) * 0.2

        water = ShapelyPolygon([
            (cx - size, cy - size / 2),
            (cx + size, cy - size / 2),
            (cx + size, cy + size / 2),
            (cx - size, cy + size / 2),
            (cx - size, cy - size / 2),
        ])

        if geom.intersects(water):
            features.append({
                "type": "Feature",
                # Clip the mock lake to the requested area.
                "geometry": mapping(water.intersection(geom)),
                "properties": {
                    "category": "water",
                    "natural": "water",
                    "name": "See",
                },
            })

        # Generate mock forest anchored at the bounding box's SW corner.
        forest_size = size * 1.5
        forest = ShapelyPolygon([
            (bounds[0], bounds[1]),
            (bounds[0] + forest_size, bounds[1]),
            (bounds[0] + forest_size, bounds[1] + forest_size),
            (bounds[0], bounds[1] + forest_size),
            (bounds[0], bounds[1]),
        ])

        if geom.intersects(forest):
            features.append({
                "type": "Feature",
                "geometry": mapping(forest.intersection(geom)),
                "properties": {
                    "category": "vegetation",
                    "landuse": "forest",
                    "name": "Wald",
                },
            })

        return features

    async def get_feature_statistics(self, polygon: dict) -> dict:
        """Get statistics about features in an area.

        Returns:
            Dict with ``total_features`` (int) and ``by_category``
            (mapping of category name to count).
        """
        features = await self.extract_features(polygon)

        categories = {}
        for feature in features.get("features", []):
            category = feature.get("properties", {}).get("category", "other")
            categories[category] = categories.get(category, 0) + 1

        return {
            "total_features": len(features.get("features", [])),
            "by_category": categories,
        }

    async def search_features(
        self,
        polygon: dict,
        category: str,
        name_filter: Optional[str] = None,
    ) -> list:
        """Search for specific features within an area.

        Args:
            polygon: GeoJSON geometry dict describing the area.
            category: Required ``properties.category`` value (e.g. "building").
            name_filter: Optional case-insensitive substring matched against
                each feature's ``name`` property.

        Returns:
            List of matching GeoJSON Feature dicts.
        """
        all_features = await self.extract_features(polygon)

        filtered = []
        for feature in all_features.get("features", []):
            props = feature.get("properties", {})

            if props.get("category") != category:
                continue

            if name_filter:
                name = props.get("name", "")
                if name_filter.lower() not in name.lower():
                    continue

            filtered.append(feature)

        return filtered
|
||||
Reference in New Issue
Block a user