Some checks failed
Tests / Go Tests (push) Has been cancelled
Tests / Python Tests (push) Has been cancelled
Tests / Integration Tests (push) Has been cancelled
Tests / Go Lint (push) Has been cancelled
Tests / Python Lint (push) Has been cancelled
Tests / Security Scan (push) Has been cancelled
Tests / All Checks Passed (push) Has been cancelled
Security Scanning / Secret Scanning (push) Has been cancelled
Security Scanning / Dependency Vulnerability Scan (push) Has been cancelled
Security Scanning / Go Security Scan (push) Has been cancelled
Security Scanning / Python Security Scan (push) Has been cancelled
Security Scanning / Node.js Security Scan (push) Has been cancelled
Security Scanning / Docker Image Security (push) Has been cancelled
Security Scanning / Security Summary (push) Has been cancelled
CI/CD Pipeline / Go Tests (push) Has been cancelled
CI/CD Pipeline / Python Tests (push) Has been cancelled
CI/CD Pipeline / Website Tests (push) Has been cancelled
CI/CD Pipeline / Linting (push) Has been cancelled
CI/CD Pipeline / Security Scan (push) Has been cancelled
CI/CD Pipeline / Docker Build & Push (push) Has been cancelled
CI/CD Pipeline / Integration Tests (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / CI Summary (push) Has been cancelled
ci/woodpecker/manual/build-ci-image Pipeline was successful
ci/woodpecker/manual/main Pipeline failed
All services: admin-v2, studio-v2, website, ai-compliance-sdk, consent-service, klausur-service, voice-service, and infrastructure. Large PDFs and compiled binaries excluded via .gitignore.
162 lines
5.0 KiB
Python
162 lines
5.0 KiB
Python
"""
|
|
MinIO/S3 Storage Integration für Content Files
|
|
"""
|
|
import io
import json
import mimetypes
import os
import uuid
from datetime import timedelta
from pathlib import Path
from typing import BinaryIO, Optional

from minio import Minio
from minio.error import S3Error
|
|
|
|
class StorageService:
    """MinIO/S3 storage service for content files.

    Configuration is read from environment variables at construction time
    (MINIO_ENDPOINT, MINIO_ACCESS_KEY, MINIO_SECRET_KEY, MINIO_SECURE,
    MINIO_BUCKET); the target bucket is created on first use if missing.
    """

    def __init__(self):
        # Defaults match a local development MinIO instance.
        self.endpoint = os.getenv("MINIO_ENDPOINT", "localhost:9000")
        self.access_key = os.getenv("MINIO_ACCESS_KEY", "minioadmin")
        self.secret_key = os.getenv("MINIO_SECRET_KEY", "minioadmin")
        self.secure = os.getenv("MINIO_SECURE", "false").lower() == "true"
        self.bucket_name = os.getenv("MINIO_BUCKET", "breakpilot-content")

        # Initialize MinIO client
        self.client = Minio(
            self.endpoint,
            access_key=self.access_key,
            secret_key=self.secret_key,
            secure=self.secure,
        )

        # Ensure bucket exists (performs a network round-trip at startup).
        self._ensure_bucket()

    def _ensure_bucket(self):
        """Create the bucket if it doesn't exist and apply a public-read policy.

        Best-effort: failures are logged rather than raised so the service can
        still start when the bucket already exists or the credentials lack
        admin rights.
        """
        try:
            if not self.client.bucket_exists(self.bucket_name):
                self.client.make_bucket(self.bucket_name)
                print(f"✅ Created MinIO bucket: {self.bucket_name}")

                # Public read policy so content URLs are directly accessible
                # without credentials.
                policy = {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Effect": "Allow",
                            "Principal": {"AWS": "*"},
                            "Action": ["s3:GetObject"],
                            "Resource": [f"arn:aws:s3:::{self.bucket_name}/*"],
                        }
                    ],
                }
                self.client.set_bucket_policy(self.bucket_name, json.dumps(policy))
        except S3Error as e:
            print(f"⚠️ MinIO bucket check failed: {e}")

    def _public_url(self, object_name: str) -> str:
        """Build the direct (anonymous-read) URL for an uploaded object.

        BUG FIX: honors ``self.secure`` so the URL scheme matches the client's
        TLS setting — the original hard-coded ``http://`` even when
        MINIO_SECURE was true, producing broken links.
        """
        scheme = "https" if self.secure else "http"
        return f"{scheme}://{self.endpoint}/{self.bucket_name}/{object_name}"

    async def upload_file(
        self,
        file_data: BinaryIO,
        file_name: str,
        content_type: Optional[str] = None,
        creator_id: str = "unknown",
    ) -> dict:
        """
        Upload file to MinIO.

        Args:
            file_data: Seekable binary stream containing the file data.
            file_name: Original filename (used for the extension and MIME guess).
            content_type: MIME type; guessed from file_name when omitted.
            creator_id: Creator ID used as the folder prefix of the object name.

        Returns:
            dict with file_url, file_name, file_size, content_type, object_name.

        Raises:
            Exception: if the MinIO upload fails.
        """
        try:
            # Collision-free object name, preserving the original extension.
            file_ext = Path(file_name).suffix
            unique_name = f"{uuid.uuid4()}{file_ext}"
            object_name = f"{creator_id}/{unique_name}"

            # Detect content type if not provided.
            if not content_type:
                content_type, _ = mimetypes.guess_type(file_name)
                content_type = content_type or "application/octet-stream"

            # put_object requires the length up front; measure it by seeking.
            file_data.seek(0, os.SEEK_END)
            file_size = file_data.tell()
            file_data.seek(0)

            # NOTE(review): put_object is a blocking call inside an async
            # method; consider run_in_executor if this ties up the event loop.
            self.client.put_object(
                bucket_name=self.bucket_name,
                object_name=object_name,
                data=file_data,
                length=file_size,
                content_type=content_type,
            )

            return {
                "file_url": self._public_url(object_name),
                "file_name": file_name,
                "file_size": file_size,
                "content_type": content_type,
                "object_name": object_name,
            }

        except S3Error as e:
            raise Exception(f"MinIO upload failed: {e}") from e

    async def delete_file(self, object_name: str):
        """Delete a single object from the bucket.

        Raises:
            Exception: if the MinIO delete fails.
        """
        try:
            self.client.remove_object(self.bucket_name, object_name)
        except S3Error as e:
            raise Exception(f"MinIO delete failed: {e}") from e

    async def get_presigned_url(self, object_name: str, expires_in: int = 3600) -> str:
        """
        Generate presigned URL for private files.

        Args:
            object_name: Object path in bucket.
            expires_in: URL expiry in seconds (default 1 hour).

        Returns:
            Presigned URL.

        Raises:
            Exception: if URL generation fails.
        """
        try:
            return self.client.presigned_get_object(
                self.bucket_name,
                object_name,
                expires=timedelta(seconds=expires_in),
            )
        except S3Error as e:
            raise Exception(f"MinIO presigned URL failed: {e}") from e

    async def upload_thumbnail(
        self,
        file_data: BinaryIO,
        file_name: str,
        creator_id: str,
    ) -> str:
        """Upload a thumbnail image under ``<creator_id>/thumbnails/``.

        NOTE(review): content type is hard-coded to image/jpeg regardless of
        file_name's extension — confirm thumbnails are always JPEG.

        Returns:
            Public URL of the uploaded thumbnail.
        """
        result = await self.upload_file(
            file_data=file_data,
            file_name=file_name,
            content_type="image/jpeg",
            creator_id=f"{creator_id}/thumbnails",
        )
        return result["file_url"]
|
|
|
|
# Global storage instance (module-level singleton shared by the application).
# NOTE: constructing this at import time performs network I/O — the
# bucket-existence check in StorageService.__init__.
storage = StorageService()
|