fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
20
geo-service/utils/__init__.py
Normal file
20
geo-service/utils/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""
|
||||
GeoEdu Service - Utility Functions
|
||||
"""
|
||||
from .geo_utils import (
|
||||
lat_lon_to_tile,
|
||||
tile_to_bounds,
|
||||
calculate_distance,
|
||||
transform_coordinates,
|
||||
)
|
||||
from .minio_client import MinioClient
|
||||
from .license_checker import LicenseChecker
|
||||
|
||||
# Public API of geo-service.utils, re-exported for `from utils import *`.
__all__ = [
    # geo_utils helpers
    "lat_lon_to_tile",
    "tile_to_bounds",
    "calculate_distance",
    "transform_coordinates",
    # storage and licensing clients
    "MinioClient",
    "LicenseChecker",
]
|
||||
262
geo-service/utils/geo_utils.py
Normal file
262
geo-service/utils/geo_utils.py
Normal file
@@ -0,0 +1,262 @@
|
||||
"""
|
||||
Geographic Utility Functions
|
||||
Coordinate transformations, distance calculations, and tile math
|
||||
"""
|
||||
import math
|
||||
from typing import Tuple, Optional
|
||||
import pyproj
|
||||
from shapely.geometry import Point, Polygon, shape
|
||||
from shapely.ops import transform
|
||||
|
||||
|
||||
# Coordinate reference systems used throughout this module:
# WGS84 lat/lon, Web Mercator for tiles, ETRS89-LAEA for metric work in Europe.
WGS84 = pyproj.CRS("EPSG:4326")
WEB_MERCATOR = pyproj.CRS("EPSG:3857")
ETRS89_LAEA = pyproj.CRS("EPSG:3035")  # Equal area for Europe
|
||||
|
||||
|
||||
def lat_lon_to_tile(lat: float, lon: float, zoom: int) -> Tuple[int, int]:
    """
    Convert latitude/longitude to tile coordinates (XYZ scheme).

    Args:
        lat: Latitude in degrees; values outside the Web-Mercator limit
            (about ±85.0511) are clamped to it
        lon: Longitude in degrees (-180 to 180)
        zoom: Zoom level (0-22)

    Returns:
        Tuple of (x, y) tile coordinates
    """
    # Clamp latitude to the Web-Mercator limit atan(sinh(pi)) ~= 85.0511 deg
    # before projecting; tan() diverges as lat approaches +/-90.
    MAX_LAT = 85.05112878
    lat = max(-MAX_LAT, min(MAX_LAT, lat))

    n = 2 ** zoom
    x = int((lon + 180.0) / 360.0 * n)
    lat_rad = math.radians(lat)
    # Inverse Mercator: y grows from north (0) to south (n-1).
    y = int((1.0 - math.asinh(math.tan(lat_rad)) / math.pi) / 2.0 * n)

    # Clamp indices to the valid tile range [0, n-1].
    x = max(0, min(n - 1, x))
    y = max(0, min(n - 1, y))

    return x, y
|
||||
|
||||
|
||||
def tile_to_bounds(z: int, x: int, y: int) -> Tuple[float, float, float, float]:
    """
    Convert XYZ tile coordinates to a geographic bounding box.

    Args:
        z: Zoom level
        x: Tile X coordinate
        y: Tile Y coordinate

    Returns:
        Tuple of (west, south, east, north) in degrees
    """
    tiles_per_axis = 2 ** z

    # Longitude is linear in the x index.
    west = x / tiles_per_axis * 360.0 - 180.0
    east = (x + 1) / tiles_per_axis * 360.0 - 180.0

    def row_to_lat(row: int) -> float:
        # Inverse Mercator projection for a tile row edge.
        return math.degrees(math.atan(math.sinh(math.pi * (1 - 2 * row / tiles_per_axis))))

    # Row y is the northern edge; row y+1 the southern one.
    return west, row_to_lat(y + 1), east, row_to_lat(y)
|
||||
|
||||
|
||||
def tile_to_center(z: int, x: int, y: int) -> Tuple[float, float]:
    """
    Return the center point of an XYZ tile.

    Returns:
        Tuple of (longitude, latitude) in degrees
    """
    w, s, e, n = tile_to_bounds(z, x, y)
    center_lon = (w + e) / 2
    center_lat = (s + n) / 2
    return center_lon, center_lat
|
||||
|
||||
|
||||
def calculate_distance(
    lat1: float, lon1: float, lat2: float, lon2: float
) -> float:
    """
    Great-circle distance between two points via the Haversine formula.

    Args:
        lat1, lon1: First point coordinates in degrees
        lat2, lon2: Second point coordinates in degrees

    Returns:
        Distance in meters
    """
    EARTH_RADIUS_M = 6371000  # mean Earth radius

    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    d_phi = math.radians(lat2 - lat1)
    d_lambda = math.radians(lon2 - lon1)

    # Haversine of the central angle.
    half_chord = (
        math.sin(d_phi / 2) ** 2
        + math.cos(phi1) * math.cos(phi2) * math.sin(d_lambda / 2) ** 2
    )
    central_angle = 2 * math.atan2(math.sqrt(half_chord), math.sqrt(1 - half_chord))

    return EARTH_RADIUS_M * central_angle
|
||||
|
||||
|
||||
def transform_coordinates(
    geometry,
    from_crs: str = "EPSG:4326",
    to_crs: str = "EPSG:3857",
):
    """
    Reproject a Shapely geometry between coordinate reference systems.

    Args:
        geometry: Shapely geometry object
        from_crs: Source CRS (default WGS84)
        to_crs: Target CRS (default Web Mercator)

    Returns:
        Transformed geometry
    """
    # always_xy keeps the (lon, lat) axis order regardless of CRS convention.
    reprojector = pyproj.Transformer.from_crs(
        from_crs,
        to_crs,
        always_xy=True,
    )
    return transform(reprojector.transform, geometry)
|
||||
|
||||
|
||||
def calculate_area_km2(geojson: dict) -> float:
    """
    Area of a GeoJSON polygon in square kilometers.

    Projects through ETRS89-LAEA (EPSG:3035), an equal-area CRS that is
    accurate for Europe, before measuring.

    Args:
        geojson: GeoJSON geometry dict

    Returns:
        Area in square kilometers
    """
    projected = transform_coordinates(shape(geojson), "EPSG:4326", "EPSG:3035")
    # Projected units are meters, so .area is m^2; convert to km^2.
    return projected.area / 1_000_000
|
||||
|
||||
|
||||
def is_within_bounds(
    point: Tuple[float, float],
    bounds: Tuple[float, float, float, float],
) -> bool:
    """
    Check whether a point lies inside a bounding box (edges inclusive).

    Args:
        point: (longitude, latitude) tuple
        bounds: (west, south, east, north) tuple

    Returns:
        True if point is within bounds
    """
    lon, lat = point
    west, south, east, north = bounds
    lon_ok = west <= lon <= east
    lat_ok = south <= lat <= north
    return lon_ok and lat_ok
|
||||
|
||||
|
||||
def get_germany_bounds() -> Tuple[float, float, float, float]:
    """Return Germany's bounding box as (west, south, east, north) degrees."""
    west, south, east, north = 5.87, 47.27, 15.04, 55.06
    return (west, south, east, north)
|
||||
|
||||
|
||||
def meters_per_pixel(lat: float, zoom: int) -> float:
    """
    Ground resolution for 256-pixel Web-Mercator tiles.

    Args:
        lat: Latitude in degrees
        zoom: Zoom level

    Returns:
        Meters per pixel at that location and zoom
    """
    EQUATOR_CIRCUMFERENCE_M = 40075016.686

    # Pixels around the globe at this zoom; Mercator stretches distances
    # away from the equator, so scale back by cos(lat).
    equator_resolution = EQUATOR_CIRCUMFERENCE_M / (256 * (2 ** zoom))
    return equator_resolution * math.cos(math.radians(lat))
|
||||
|
||||
|
||||
def simplify_polygon(geojson: dict, tolerance: float = 0.0001) -> dict:
    """
    Reduce the vertex count of a polygon geometry.

    Args:
        geojson: GeoJSON geometry dict
        tolerance: Simplification tolerance in degrees

    Returns:
        Simplified GeoJSON geometry
    """
    from shapely.geometry import mapping

    # preserve_topology avoids producing invalid/self-intersecting output.
    slimmed = shape(geojson).simplify(tolerance, preserve_topology=True)
    return mapping(slimmed)
|
||||
|
||||
|
||||
def buffer_polygon(geojson: dict, distance_meters: float) -> dict:
    """
    Buffer a polygon by a distance given in meters.

    The geometry is round-tripped through ETRS89-LAEA (EPSG:3035) so the
    buffer is applied in real meters rather than degrees.

    Args:
        geojson: GeoJSON geometry dict
        distance_meters: Buffer distance in meters

    Returns:
        Buffered GeoJSON geometry
    """
    from shapely.geometry import mapping

    metric_geom = transform_coordinates(shape(geojson), "EPSG:4326", "EPSG:3035")
    buffered_metric = metric_geom.buffer(distance_meters)
    back_in_wgs84 = transform_coordinates(buffered_metric, "EPSG:3035", "EPSG:4326")
    return mapping(back_in_wgs84)
|
||||
|
||||
|
||||
def get_tiles_for_bounds(
    bounds: Tuple[float, float, float, float],
    zoom: int,
) -> list[Tuple[int, int]]:
    """
    List every tile coordinate covering a bounding box.

    Args:
        bounds: (west, south, east, north) in degrees
        zoom: Zoom level

    Returns:
        List of (x, y) tile coordinates
    """
    west, south, east, north = bounds

    # Tile y grows southward: the south-west corner gives (x_min, y_max),
    # the north-east corner gives (x_max, y_min).
    x_min, y_max = lat_lon_to_tile(south, west, zoom)
    x_max, y_min = lat_lon_to_tile(north, east, zoom)

    return [
        (x, y)
        for x in range(x_min, x_max + 1)
        for y in range(y_min, y_max + 1)
    ]
|
||||
223
geo-service/utils/license_checker.py
Normal file
223
geo-service/utils/license_checker.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""
|
||||
License Checker Utility
|
||||
Validates data source licenses and generates attribution
|
||||
"""
|
||||
from typing import Optional
|
||||
from enum import Enum
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class LicenseType(Enum):
    """License types recognized for map/imagery data sources."""

    ODBL = "odbl"  # Open Database License (OpenStreetMap)
    COPERNICUS = "copernicus"  # Copernicus DEM terms
    CC_BY = "cc-by"  # Creative Commons Attribution
    CC_BY_SA = "cc-by-sa"  # Creative Commons Attribution-ShareAlike
    CC0 = "cc0"  # Public domain dedication
    PROPRIETARY = "proprietary"  # Never permitted for use
|
||||
|
||||
|
||||
class DataSource(Enum):
    """Known map/imagery data sources, both permitted and forbidden."""

    OPENSTREETMAP = "openstreetmap"
    COPERNICUS_DEM = "copernicus_dem"
    OPENAERIAL = "openaerial"
    WIKIMEDIA = "wikimedia"
    # The providers below are forbidden for this service's use.
    GOOGLE = "google"
    BING = "bing"
    APPLE = "apple"
    HERE = "here"
|
||||
|
||||
|
||||
# License metadata for each permitted source: license type, required
# attribution string, canonical license URL, and usage flags.
ALLOWED_SOURCES = {
    DataSource.OPENSTREETMAP: {
        "license": LicenseType.ODBL,
        "attribution": "© OpenStreetMap contributors",
        "url": "https://www.openstreetmap.org/copyright",
        "commercial": True,
        "derivative_allowed": True,
    },
    DataSource.COPERNICUS_DEM: {
        "license": LicenseType.COPERNICUS,
        "attribution": "© Copernicus Service Information",
        "url": "https://spacedata.copernicus.eu/",
        "commercial": True,
        "derivative_allowed": True,
    },
    DataSource.OPENAERIAL: {
        "license": LicenseType.CC_BY,
        "attribution": "© OpenAerialMap contributors",
        "url": "https://openaerialmap.org/",
        "commercial": True,
        "derivative_allowed": True,
    },
    DataSource.WIKIMEDIA: {
        "license": LicenseType.CC_BY_SA,
        "attribution": "Wikimedia Commons",
        "url": "https://commons.wikimedia.org/",
        "commercial": True,
        "derivative_allowed": True,
    },
}
|
||||
|
||||
# Providers whose terms rule out our use, mapped to the human-readable reason.
FORBIDDEN_SOURCES = {
    DataSource.GOOGLE: "Google Maps ToS prohibit derivatives and offline use",
    DataSource.BING: "Bing Maps has restrictive licensing",
    DataSource.APPLE: "Apple Maps prohibits commercial use",
    DataSource.HERE: "HERE requires paid licensing",
}
|
||||
|
||||
|
||||
class LicenseChecker:
    """
    Validates data-source licenses and builds attribution strings.

    Ensures DSGVO/GDPR compliance and proper licensing for educational use.
    All methods are stateless and exposed as staticmethods.
    """

    @staticmethod
    def is_source_allowed(source: DataSource) -> bool:
        """Return True if the data source may be used."""
        return source in ALLOWED_SOURCES

    @staticmethod
    def get_forbidden_reason(source: DataSource) -> Optional[str]:
        """Return why a source is forbidden, or None if it is not forbidden."""
        return FORBIDDEN_SOURCES.get(source)

    @staticmethod
    def validate_url(url: str) -> tuple[bool, str]:
        """
        Classify a URL as forbidden, allowed, or unknown by its host fragments.

        Returns:
            Tuple of (is_allowed, message)
        """
        haystack = url.lower()

        # Forbidden providers take precedence over anything else.
        forbidden_patterns = {
            "google": DataSource.GOOGLE,
            "googleapis": DataSource.GOOGLE,
            "gstatic": DataSource.GOOGLE,
            "bing.com": DataSource.BING,
            "virtualearth": DataSource.BING,
            "apple.com/maps": DataSource.APPLE,
            "here.com": DataSource.HERE,
        }
        for fragment, provider in forbidden_patterns.items():
            if fragment in haystack:
                reason = FORBIDDEN_SOURCES.get(provider, "Not allowed")
                return False, f"FORBIDDEN: {provider.value} - {reason}"

        allowed_patterns = {
            "openstreetmap": DataSource.OPENSTREETMAP,
            "tile.osm": DataSource.OPENSTREETMAP,
            "copernicus": DataSource.COPERNICUS_DEM,
            "openaerialmap": DataSource.OPENAERIAL,
            "wikimedia": DataSource.WIKIMEDIA,
        }
        for fragment, provider in allowed_patterns.items():
            if fragment in haystack:
                info = ALLOWED_SOURCES[provider]
                return True, f"ALLOWED: {provider.value} ({info['license'].value})"

        # Unrecognized host: allow, but flag for a manual license review.
        return True, "UNKNOWN: Verify license manually"

    @staticmethod
    def get_attribution_for_sources(
        sources: list[DataSource],
    ) -> list[dict]:
        """
        Build attribution records for the known sources in *sources*.

        Args:
            sources: List of data sources used

        Returns:
            List of attribution dictionaries (unknown sources are skipped)
        """
        return [
            {
                "name": src.value.replace("_", " ").title(),
                "license": ALLOWED_SOURCES[src]["license"].value.upper(),
                "attribution": ALLOWED_SOURCES[src]["attribution"],
                "url": ALLOWED_SOURCES[src]["url"],
                "required": True,
            }
            for src in sources
            if src in ALLOWED_SOURCES
        ]

    @staticmethod
    def generate_attribution_html(sources: list[DataSource]) -> str:
        """
        Build an HTML attribution footer (one anchor per source, ' | ' joined).

        Args:
            sources: List of data sources used

        Returns:
            HTML string with attribution, or "" when nothing applies
        """
        entries = LicenseChecker.get_attribution_for_sources(sources)
        if not entries:
            return ""

        anchors = [
            f'<a href="{entry["url"]}" target="_blank" rel="noopener">'
            f'{entry["attribution"]}</a>'
            for entry in entries
        ]
        return " | ".join(anchors)

    @staticmethod
    def generate_attribution_text(sources: list[DataSource]) -> str:
        """
        Build a plain-text attribution line (' | ' joined).

        Args:
            sources: List of data sources used

        Returns:
            Plain text attribution string, or "" when nothing applies
        """
        entries = LicenseChecker.get_attribution_for_sources(sources)
        if not entries:
            return ""
        return " | ".join(entry["attribution"] for entry in entries)

    @staticmethod
    def check_commercial_use(sources: list[DataSource]) -> tuple[bool, list[str]]:
        """
        Verify that every source permits commercial use.

        Returns:
            Tuple of (all_allowed, list_of_issues)
        """
        issues: list[str] = []
        for src in sources:
            if src in FORBIDDEN_SOURCES:
                issues.append(f"{src.value}: {FORBIDDEN_SOURCES[src]}")
            elif src in ALLOWED_SOURCES and not ALLOWED_SOURCES[src]["commercial"]:
                issues.append(f"{src.value}: Commercial use not allowed")
        return len(issues) == 0, issues
|
||||
237
geo-service/utils/minio_client.py
Normal file
237
geo-service/utils/minio_client.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""
|
||||
MinIO Client Utility
|
||||
S3-compatible storage operations for AOI bundles
|
||||
"""
|
||||
import os
|
||||
from typing import Optional, BinaryIO
|
||||
import structlog
|
||||
from minio import Minio
|
||||
from minio.error import S3Error
|
||||
|
||||
from config import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class MinioClient:
    """
    Client for MinIO S3-compatible storage.

    Used for storing generated AOI bundles and assets.

    NOTE(review): methods are declared async but call the blocking MinIO
    SDK directly — confirm whether call sites expect them to be offloaded
    to a thread/executor.
    """

    def __init__(self):
        """Read connection parameters from settings; connect lazily."""
        self.endpoint = settings.minio_endpoint
        self.access_key = settings.minio_access_key
        self.secret_key = settings.minio_secret_key
        self.bucket = settings.minio_bucket
        self.secure = settings.minio_secure
        self._client: Optional[Minio] = None  # created on first access

    @property
    def client(self) -> Minio:
        """Get or create the MinIO client instance (ensures the bucket once)."""
        if self._client is None:
            self._client = Minio(
                self.endpoint,
                access_key=self.access_key,
                secret_key=self.secret_key,
                secure=self.secure,
            )
            self._ensure_bucket_exists()
        return self._client

    def _ensure_bucket_exists(self):
        """Create the bucket if it doesn't exist; failures are logged, not raised."""
        try:
            if not self._client.bucket_exists(self.bucket):
                self._client.make_bucket(self.bucket)
                logger.info("Created MinIO bucket", bucket=self.bucket)
        except S3Error as e:
            logger.error("Error creating bucket", error=str(e))

    async def upload_file(
        self,
        local_path: str,
        object_name: str,
        content_type: str = "application/octet-stream",
    ) -> Optional[str]:
        """
        Upload a file to MinIO.

        Args:
            local_path: Path to local file
            object_name: Name in MinIO (can include path)
            content_type: MIME type of the file

        Returns:
            Object URL or None on failure
        """
        try:
            self.client.fput_object(
                self.bucket,
                object_name,
                local_path,
                content_type=content_type,
            )
            logger.info("Uploaded file to MinIO", object_name=object_name)
            return f"{self.endpoint}/{self.bucket}/{object_name}"
        except S3Error as e:
            logger.error("Error uploading file", error=str(e))
            return None

    async def upload_bytes(
        self,
        data: bytes,
        object_name: str,
        content_type: str = "application/octet-stream",
    ) -> Optional[str]:
        """
        Upload bytes to MinIO.

        Args:
            data: Bytes to upload
            object_name: Name in MinIO
            content_type: MIME type

        Returns:
            Object URL or None on failure
        """
        from io import BytesIO

        try:
            stream = BytesIO(data)
            self.client.put_object(
                self.bucket,
                object_name,
                stream,
                length=len(data),
                content_type=content_type,
            )
            logger.info("Uploaded bytes to MinIO", object_name=object_name, size=len(data))
            return f"{self.endpoint}/{self.bucket}/{object_name}"
        except S3Error as e:
            logger.error("Error uploading bytes", error=str(e))
            return None

    async def download_file(
        self,
        object_name: str,
        local_path: str,
    ) -> bool:
        """
        Download a file from MinIO.

        Args:
            object_name: Name in MinIO
            local_path: Destination path

        Returns:
            True on success
        """
        try:
            self.client.fget_object(self.bucket, object_name, local_path)
            logger.info("Downloaded file from MinIO", object_name=object_name)
            return True
        except S3Error as e:
            logger.error("Error downloading file", error=str(e))
            return False

    async def get_bytes(self, object_name: str) -> Optional[bytes]:
        """
        Get object content as bytes.

        Args:
            object_name: Name in MinIO

        Returns:
            File content or None
        """
        try:
            response = self.client.get_object(self.bucket, object_name)
            try:
                return response.read()
            finally:
                # Always return the HTTP connection to the pool, even if
                # read() raises — the original leaked the connection then.
                response.close()
                response.release_conn()
        except S3Error as e:
            logger.error("Error getting bytes", error=str(e))
            return None

    async def delete_object(self, object_name: str) -> bool:
        """
        Delete an object from MinIO.

        Args:
            object_name: Name in MinIO

        Returns:
            True on success
        """
        try:
            self.client.remove_object(self.bucket, object_name)
            logger.info("Deleted object from MinIO", object_name=object_name)
            return True
        except S3Error as e:
            logger.error("Error deleting object", error=str(e))
            return False

    async def list_objects(self, prefix: str = "") -> list[str]:
        """
        List objects in the bucket.

        Args:
            prefix: Filter by prefix

        Returns:
            List of object names (empty on error)
        """
        try:
            objects = self.client.list_objects(self.bucket, prefix=prefix)
            return [obj.object_name for obj in objects]
        except S3Error as e:
            logger.error("Error listing objects", error=str(e))
            return []

    async def get_presigned_url(
        self,
        object_name: str,
        expiry_hours: int = 24,
    ) -> Optional[str]:
        """
        Get a presigned URL for downloading an object.

        Args:
            object_name: Name in MinIO
            expiry_hours: URL expiry time in hours

        Returns:
            Presigned URL or None
        """
        from datetime import timedelta

        try:
            url = self.client.presigned_get_object(
                self.bucket,
                object_name,
                expires=timedelta(hours=expiry_hours),
            )
            return url
        except S3Error as e:
            logger.error("Error generating presigned URL", error=str(e))
            return None

    async def object_exists(self, object_name: str) -> bool:
        """Check if an object exists (stat succeeds)."""
        try:
            self.client.stat_object(self.bucket, object_name)
            return True
        except S3Error:
            return False

    async def get_object_size(self, object_name: str) -> Optional[int]:
        """Get the size of an object in bytes, or None if unavailable."""
        try:
            stat = self.client.stat_object(self.bucket, object_name)
            return stat.size
        except S3Error:
            return None
|
||||
Reference in New Issue
Block a user