This repository was archived on 2026-02-15.
Benjamin Admin 21a844cb8a fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) recovered only some of the files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 09:51:32 +01:00


"""
MinIO/S3 Storage Integration für Content Files
"""
from minio import Minio
from minio.error import S3Error
import os
import io
import uuid
from typing import BinaryIO, Optional
from pathlib import Path
import mimetypes
class StorageService:
"""MinIO/S3 Storage Service"""
def __init__(self):
self.endpoint = os.getenv("MINIO_ENDPOINT", "localhost:9000")
self.access_key = os.getenv("MINIO_ACCESS_KEY", "minioadmin")
self.secret_key = os.getenv("MINIO_SECRET_KEY", "minioadmin")
self.secure = os.getenv("MINIO_SECURE", "false").lower() == "true"
self.bucket_name = os.getenv("MINIO_BUCKET", "breakpilot-content")
# Initialize MinIO client
self.client = Minio(
self.endpoint,
access_key=self.access_key,
secret_key=self.secret_key,
secure=self.secure
)
# Ensure bucket exists
self._ensure_bucket()
    def _ensure_bucket(self):
        """Create bucket if it doesn't exist"""
        try:
            if not self.client.bucket_exists(self.bucket_name):
                self.client.make_bucket(self.bucket_name)
                print(f"✅ Created MinIO bucket: {self.bucket_name}")

                # Set public read policy for content
                policy = {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Effect": "Allow",
                            "Principal": {"AWS": "*"},
                            "Action": ["s3:GetObject"],
                            "Resource": [f"arn:aws:s3:::{self.bucket_name}/*"],
                        }
                    ],
                }
                self.client.set_bucket_policy(self.bucket_name, json.dumps(policy))
        except S3Error as e:
            print(f"⚠️ MinIO bucket check failed: {e}")
    async def upload_file(
        self,
        file_data: BinaryIO,
        file_name: str,
        content_type: Optional[str] = None,
        creator_id: str = "unknown",
    ) -> dict:
        """
        Upload file to MinIO

        Args:
            file_data: File binary data
            file_name: Original filename
            content_type: MIME type
            creator_id: Creator ID for folder organization

        Returns:
            dict with file_url, file_name, file_size, content_type, object_name
        """
        try:
            # Generate unique object name, keeping the original extension
            file_ext = Path(file_name).suffix
            unique_name = f"{uuid.uuid4()}{file_ext}"
            object_name = f"{creator_id}/{unique_name}"

            # Detect content type if not provided
            if not content_type:
                content_type, _ = mimetypes.guess_type(file_name)
                content_type = content_type or "application/octet-stream"

            # Get file size by seeking to the end of the stream
            file_data.seek(0, os.SEEK_END)
            file_size = file_data.tell()
            file_data.seek(0)

            # Upload to MinIO
            self.client.put_object(
                bucket_name=self.bucket_name,
                object_name=object_name,
                data=file_data,
                length=file_size,
                content_type=content_type,
            )

            # Generate public URL; the scheme must match the client's TLS setting
            scheme = "https" if self.secure else "http"
            file_url = f"{scheme}://{self.endpoint}/{self.bucket_name}/{object_name}"

            return {
                "file_url": file_url,
                "file_name": file_name,
                "file_size": file_size,
                "content_type": content_type,
                "object_name": object_name,
            }
        except S3Error as e:
            raise Exception(f"MinIO upload failed: {e}") from e
    async def delete_file(self, object_name: str):
        """Delete file from MinIO"""
        try:
            self.client.remove_object(self.bucket_name, object_name)
        except S3Error as e:
            raise Exception(f"MinIO delete failed: {e}") from e
    async def get_presigned_url(self, object_name: str, expires_in: int = 3600) -> str:
        """
        Generate presigned URL for private files

        Args:
            object_name: Object path in bucket
            expires_in: URL expiry in seconds (default 1 hour)

        Returns:
            Presigned URL
        """
        try:
            return self.client.presigned_get_object(
                self.bucket_name,
                object_name,
                expires=timedelta(seconds=expires_in),
            )
        except S3Error as e:
            raise Exception(f"MinIO presigned URL failed: {e}") from e
    async def upload_thumbnail(
        self,
        file_data: BinaryIO,
        file_name: str,
        creator_id: str,
    ) -> str:
        """Upload thumbnail image"""
        result = await self.upload_file(
            file_data=file_data,
            file_name=file_name,
            content_type="image/jpeg",
            creator_id=f"{creator_id}/thumbnails",
        )
        return result["file_url"]


# Global storage instance
storage = StorageService()
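
For reference, a minimal usage sketch (hypothetical: it assumes this module is importable as `storage_service`, that a MinIO server is reachable at the configured endpoint, and that the file name and creator ID are illustrative, not part of the restored file):

# usage_example.py (hypothetical; not part of this repository)
import asyncio

from storage_service import storage  # assumed import path for the module above


async def main():
    # Upload a local file under an illustrative creator ID
    with open("lesson.pdf", "rb") as f:
        result = await storage.upload_file(
            file_data=f,
            file_name="lesson.pdf",
            creator_id="demo-creator",
        )
    print(result["file_url"])  # public URL (bucket policy allows anonymous reads)

    # Time-limited link for private delivery (15 minutes)
    signed = await storage.get_presigned_url(result["object_name"], expires_in=900)
    print(signed)

    # Remove the object again
    await storage.delete_file(result["object_name"])


if __name__ == "__main__":
    asyncio.run(main())

Note that the methods are declared async but the underlying MinIO SDK calls are blocking; in a real service you may want to offload them to a worker thread (e.g. via asyncio.to_thread) so they don't stall the event loop.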