Compare commits
13 Commits
65177d3ff7
...
coolify
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
520a0f401c | ||
|
|
6adf1fe1eb | ||
|
|
2ac6559291 | ||
|
|
52618a0630 | ||
|
|
e1a84fd568 | ||
|
|
dd0bda05be | ||
|
|
4c68666c5c | ||
|
|
46b1fdc20f | ||
|
|
445cbc3100 | ||
|
|
8fe4473205 | ||
|
|
07dbd78962 | ||
|
|
e9487a31c6 | ||
|
|
0fb4a7e359 |
@@ -140,117 +140,20 @@ jobs:
|
||||
python -m pytest tests/bqas/ -v --tb=short || true
|
||||
|
||||
# ========================================
|
||||
# Build & Deploy auf Hetzner (nur main, kein PR)
|
||||
# Deploy via Coolify (nur main, kein PR)
|
||||
# ========================================
|
||||
|
||||
deploy-hetzner:
|
||||
deploy-coolify:
|
||||
name: Deploy
|
||||
runs-on: docker
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
needs:
|
||||
- test-go-consent
|
||||
container: docker:27-cli
|
||||
container:
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Deploy
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
set -euo pipefail
|
||||
DEPLOY_DIR="/opt/breakpilot-core"
|
||||
COMPOSE_FILES="-f docker-compose.yml -f docker-compose.hetzner.yml"
|
||||
COMMIT_SHA="${GITHUB_SHA:-unknown}"
|
||||
SHORT_SHA="${COMMIT_SHA:0:8}"
|
||||
REPO_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
|
||||
|
||||
# Services die deployed werden
|
||||
SERVICES="postgres valkey qdrant minio ollama mailpit embedding-service rag-service backend-core consent-service health-aggregator"
|
||||
|
||||
echo "=== BreakPilot Core Deploy ==="
|
||||
echo "Commit: ${SHORT_SHA}"
|
||||
echo "Deploy Dir: ${DEPLOY_DIR}"
|
||||
echo "Services: ${SERVICES}"
|
||||
echo ""
|
||||
|
||||
# 1. Repo auf dem Host erstellen/aktualisieren via Helper-Container
|
||||
echo "=== Updating code on host ==="
|
||||
docker run --rm \
|
||||
-v "${DEPLOY_DIR}:${DEPLOY_DIR}" \
|
||||
--entrypoint sh \
|
||||
alpine/git:latest \
|
||||
-c "
|
||||
if [ ! -d '${DEPLOY_DIR}/.git' ]; then
|
||||
echo 'Erstmaliges Klonen nach ${DEPLOY_DIR}...'
|
||||
git clone '${REPO_URL}' '${DEPLOY_DIR}'
|
||||
else
|
||||
cd '${DEPLOY_DIR}'
|
||||
git fetch origin main
|
||||
git reset --hard origin/main
|
||||
fi
|
||||
"
|
||||
echo "Code aktualisiert auf ${SHORT_SHA}"
|
||||
|
||||
# 2. .env sicherstellen
|
||||
docker run --rm -v "${DEPLOY_DIR}:${DEPLOY_DIR}" alpine \
|
||||
sh -c "
|
||||
if [ ! -f '${DEPLOY_DIR}/.env' ]; then
|
||||
echo 'WARNUNG: ${DEPLOY_DIR}/.env fehlt!'
|
||||
echo 'Erstelle .env aus .env.example mit Defaults...'
|
||||
if [ -f '${DEPLOY_DIR}/.env.example' ]; then
|
||||
cp '${DEPLOY_DIR}/.env.example' '${DEPLOY_DIR}/.env'
|
||||
echo '.env aus .env.example erstellt'
|
||||
else
|
||||
echo 'Kein .env.example gefunden — Services starten mit Defaults'
|
||||
fi
|
||||
else
|
||||
echo '.env vorhanden'
|
||||
fi
|
||||
"
|
||||
|
||||
# 3. Shared Network erstellen (falls noch nicht vorhanden)
|
||||
docker network create breakpilot-network 2>/dev/null || true
|
||||
|
||||
# 4. Build + Deploy via Helper-Container
|
||||
echo ""
|
||||
echo "=== Building + Deploying ==="
|
||||
docker run --rm \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock \
|
||||
-v "${DEPLOY_DIR}:${DEPLOY_DIR}" \
|
||||
-w "${DEPLOY_DIR}" \
|
||||
docker:27-cli \
|
||||
sh -c "
|
||||
set -e
|
||||
COMPOSE_FILES='-f docker-compose.yml -f docker-compose.hetzner.yml'
|
||||
|
||||
echo '=== Building Docker Images ==='
|
||||
docker compose \${COMPOSE_FILES} build --parallel \
|
||||
backend-core consent-service rag-service embedding-service health-aggregator
|
||||
|
||||
echo ''
|
||||
echo '=== Starting infrastructure ==='
|
||||
docker compose \${COMPOSE_FILES} up -d postgres valkey qdrant minio mailpit
|
||||
|
||||
echo 'Warte auf DB + Cache...'
|
||||
sleep 10
|
||||
|
||||
echo ''
|
||||
echo '=== Starting Ollama + pulling bge-m3 ==='
|
||||
docker compose \${COMPOSE_FILES} up -d ollama
|
||||
sleep 5
|
||||
|
||||
# bge-m3 Modell pullen (nur beim ersten Mal ~670MB)
|
||||
echo 'Pulling bge-m3 model (falls noch nicht vorhanden)...'
|
||||
docker exec bp-core-ollama ollama pull bge-m3 2>&1 || echo 'WARNUNG: bge-m3 pull fehlgeschlagen (wird spaeter nachgeholt)'
|
||||
|
||||
echo ''
|
||||
echo '=== Starting application services ==='
|
||||
docker compose \${COMPOSE_FILES} up -d \
|
||||
embedding-service rag-service backend-core consent-service health-aggregator
|
||||
|
||||
echo ''
|
||||
echo '=== Health Checks ==='
|
||||
sleep 15
|
||||
for svc in bp-core-postgres bp-core-valkey bp-core-qdrant bp-core-ollama bp-core-embedding-service bp-core-rag-service bp-core-backend bp-core-consent-service bp-core-health; do
|
||||
STATUS=\$(docker inspect --format='{{.State.Status}}' \"\${svc}\" 2>/dev/null || echo 'not found')
|
||||
echo \"\${svc}: \${STATUS}\"
|
||||
done
|
||||
"
|
||||
|
||||
echo ""
|
||||
echo "=== Deploy abgeschlossen: ${SHORT_SHA} ==="
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
@@ -7,10 +7,12 @@ on:
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: docker
|
||||
container: alpine:latest
|
||||
steps:
|
||||
- name: Deploy via Coolify API
|
||||
run: |
|
||||
apk add --no-cache curl
|
||||
echo "Deploying breakpilot-core to Coolify..."
|
||||
HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" \
|
||||
-X POST \
|
||||
|
||||
@@ -15,6 +15,7 @@ networks:
|
||||
volumes:
|
||||
valkey_data:
|
||||
embedding_models:
|
||||
paddleocr_models:
|
||||
|
||||
services:
|
||||
|
||||
@@ -141,6 +142,37 @@ services:
|
||||
networks:
|
||||
- breakpilot-network
|
||||
|
||||
# =========================================================
# OCR SERVICE (PaddleOCR PP-OCRv4)
# NOTE(review): header previously said PP-OCRv5, but the service pins
# paddleocr>=2.7.0,<3.0.0, which ships PP-OCRv4 models — confirm intent.
# =========================================================
  paddleocr-service:
    build:
      context: ./paddleocr-service
      dockerfile: Dockerfile
    container_name: bp-core-paddleocr
    expose:
      - "8095"
    environment:
      PADDLEOCR_API_KEY: ${PADDLEOCR_API_KEY:-}
      # MKL-DNN disabled; matches enable_mkldnn=False in the service code.
      FLAGS_use_mkldnn: "0"
    volumes:
      # Persist downloaded models so restarts skip the download.
      - paddleocr_models:/root/.paddleocr
    labels:
      - "traefik.http.services.paddleocr.loadbalancer.server.port=8095"
    deploy:
      resources:
        limits:
          memory: 6G
    healthcheck:
      test: ["CMD", "curl", "-f", "http://127.0.0.1:8095/health"]
      interval: 30s
      timeout: 10s
      # Long start period: first boot downloads the OCR models.
      start_period: 300s
      retries: 5
    restart: unless-stopped
    networks:
      - breakpilot-network
|
||||
|
||||
# =========================================================
|
||||
# HEALTH AGGREGATOR
|
||||
# =========================================================
|
||||
@@ -153,7 +185,7 @@ services:
|
||||
- "8099"
|
||||
environment:
|
||||
PORT: 8099
|
||||
CHECK_SERVICES: "valkey:6379,consent-service:8081,rag-service:8097,embedding-service:8087"
|
||||
CHECK_SERVICES: "valkey:6379,consent-service:8081,rag-service:8097,embedding-service:8087,paddleocr-service:8095"
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://127.0.0.1:8099/health"]
|
||||
interval: 30s
|
||||
|
||||
16
paddleocr-service/Dockerfile
Normal file
16
paddleocr-service/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
# PaddleOCR remote OCR service — CPU-only image.
FROM python:3.11-slim
WORKDIR /app

# Native libraries required by paddlepaddle/opencv at runtime; curl for the
# container healthcheck below.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libgl1 libglib2.0-0 libgomp1 curl \
    && rm -rf /var/lib/apt/lists/*

# Install dependencies in their own layer so source edits don't invalidate
# the pip cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 8095
# start-period must cover the first-boot model download + warmup. The compose
# healthcheck uses start_period: 300s for the same service; 120s here could
# mark a standalone container unhealthy mid-download, so keep them aligned.
HEALTHCHECK --interval=30s --timeout=10s --start-period=300s --retries=3 \
  CMD curl -f http://127.0.0.1:8095/health || exit 1
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8095"]
|
||||
110
paddleocr-service/main.py
Normal file
110
paddleocr-service/main.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""PaddleOCR Remote Service — PP-OCRv4 on x86_64 (CPU)."""
|
||||
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
|
||||
import numpy as np
|
||||
from fastapi import FastAPI, File, Header, HTTPException, UploadFile
|
||||
from PIL import Image
|
||||
|
||||
# INFO-level logging so model-load progress shows up in container logs.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="PaddleOCR Service")

# Engine state shared between the background loader thread and the endpoints.
_engine = None    # PaddleOCR instance once loaded
_ready = False    # True after successful load + warmup
_loading = False  # True while the background load is in flight

# Optional shared-secret auth; an empty value disables the check entirely.
API_KEY = os.getenv("PADDLEOCR_API_KEY", "")
|
||||
|
||||
|
||||
def _load_model():
    """Load the PaddleOCR engine in a background thread.

    Mutates module globals: on success ``_engine`` holds the initialized
    engine and ``_ready`` flips True; on failure ``_loading`` is cleared so
    ``/health`` reports the failure instead of "loading" forever.
    """
    global _engine, _ready, _loading
    try:
        logger.info("Importing paddleocr...")
        from paddleocr import PaddleOCR

        logger.info("Loading PaddleOCR model (PP-OCRv4, lang=en)...")
        _engine = PaddleOCR(
            lang="en",
            use_angle_cls=True,
            show_log=False,
            enable_mkldnn=False,  # matches FLAGS_use_mkldnn=0 in compose env
            use_gpu=False,
        )
        logger.info("PaddleOCR model loaded — running warmup...")
        # Warmup with a tiny blank image to trigger any lazy init
        dummy = np.ones((30, 100, 3), dtype=np.uint8) * 255
        _engine.ocr(dummy)
        _ready = True
        logger.info("PaddleOCR ready to serve")
    except Exception as e:
        logger.error(f"Failed to load PaddleOCR: {e}", exc_info=True)
        # Fix: clear the loading flag so /health stops reporting "loading"
        # after a failed load; previously the flag stayed True forever and
        # the error state was unreachable.
        _loading = False
|
||||
|
||||
|
||||
@app.on_event("startup")
def startup_load_model():
    """Kick off model loading in the background.

    Returning immediately lets the HTTP server come up right away, so the
    container healthcheck sees "loading" instead of a connection timeout.
    """
    global _loading
    _loading = True
    loader = threading.Thread(target=_load_model, daemon=True)
    loader.start()
    logger.info("Model loading started in background thread")
|
||||
|
||||
|
||||
@app.get("/health")
def health():
    """Report engine state for container healthchecks.

    Returns:
        200 {"status": "ok"} once the model is loaded and warmed up,
        200 {"status": "loading"} while the background load is in flight
        (the healthcheck start_period is expected to cover this phase).

    Raises:
        HTTPException 503 when the load finished but failed. Previously this
        returned 200 {"status": "error"}, which ``curl -f`` healthchecks
        treat as healthy — a dead engine would never be detected.
    """
    if _ready:
        return {"status": "ok", "model": "PP-OCRv4"}
    if _loading:
        return {"status": "loading"}
    raise HTTPException(status_code=503, detail="Model failed to load")
|
||||
|
||||
|
||||
@app.post("/ocr")
async def ocr(
    file: UploadFile = File(...),
    x_api_key: str = Header(default=""),
):
    """Run OCR on an uploaded image and return word-level bounding boxes.

    Returns:
        {"words": [{"text", "left", "top", "width", "height", "conf"}, ...],
         "image_width": int, "image_height": int}
        where ``conf`` is a percentage (0–100, one decimal).

    Raises:
        HTTPException 401 on bad API key, 400 on an undecodable upload,
        503 while the model is still loading, 500 on engine failure.
    """
    if API_KEY and x_api_key != API_KEY:
        raise HTTPException(status_code=401, detail="Invalid API key")

    if not _ready:
        raise HTTPException(status_code=503, detail="Model still loading")

    img_bytes = await file.read()
    try:
        img = Image.open(io.BytesIO(img_bytes)).convert("RGB")
    except Exception:
        # Fix: a corrupt or non-image upload previously escaped as an
        # unhandled exception (opaque 500); report it as a client error.
        raise HTTPException(status_code=400, detail="Could not decode image")
    img_np = np.array(img)

    try:
        result = _engine.ocr(img_np)
    except Exception as e:
        logger.error(f"OCR failed: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"OCR failed: {e}")

    # PaddleOCR 2.x returns one entry per input image: [[ [box, (text, conf)], ... ]]
    if not result or not result[0]:
        return {"words": [], "image_width": img_np.shape[1], "image_height": img_np.shape[0]}

    words = []
    for line in result[0]:
        box, (text, conf) = line[0], line[1]
        # Collapse the 4-point quad into an axis-aligned bounding box.
        x_min = min(p[0] for p in box)
        y_min = min(p[1] for p in box)
        x_max = max(p[0] for p in box)
        y_max = max(p[1] for p in box)
        words.append({
            "text": str(text).strip(),
            "left": int(x_min),
            "top": int(y_min),
            "width": int(x_max - x_min),
            "height": int(y_max - y_min),
            "conf": round(float(conf) * 100, 1),
        })

    return {
        "words": words,
        "image_width": img_np.shape[1],
        "image_height": img_np.shape[0],
    }
|
||||
7
paddleocr-service/requirements.txt
Normal file
7
paddleocr-service/requirements.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
paddlepaddle>=2.6.0,<3.0.0
|
||||
paddleocr>=2.7.0,<3.0.0
|
||||
fastapi>=0.110.0
|
||||
uvicorn>=0.25.0
|
||||
python-multipart>=0.0.6
|
||||
Pillow>=10.0.0
|
||||
numpy>=1.24.0
|
||||
Reference in New Issue
Block a user