feat: add paddleocr-service directory to coolify branch
The docker-compose.coolify.yml references paddleocr-service/Dockerfile but the directory only existed on main. Coolify clones the coolify branch and needs the source files to build the container. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
16
paddleocr-service/Dockerfile
Normal file
16
paddleocr-service/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Slim CPython 3.11 base on x86_64 — matches the wheels pinned in requirements.txt.
FROM python:3.11-slim

WORKDIR /app

# Native runtime libraries: libgl1/libglib2.0-0 (OpenCV image ops used by
# PaddleOCR), libgomp1 (OpenMP for paddlepaddle), curl (used by HEALTHCHECK).
# Clearing the apt lists keeps the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libgl1 libglib2.0-0 libgomp1 curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the pip layer is cached independently of app code.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 8095

# Generous start-period: the app boots immediately but downloads/loads the OCR
# model in a background thread (see main.py), so early probes must not kill it.
HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \
    CMD curl -f http://127.0.0.1:8095/health || exit 1

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8095"]
115
paddleocr-service/main.py
Normal file
115
paddleocr-service/main.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
"""PaddleOCR Remote Service — PP-OCRv5 Latin auf x86_64."""
|
||||||
|
|
||||||
|
import io
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import threading
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from fastapi import FastAPI, File, Header, HTTPException, UploadFile
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
app = FastAPI(title="PaddleOCR Service")
|
||||||
|
|
||||||
|
_engine = None
|
||||||
|
_ready = False
|
||||||
|
_loading = False
|
||||||
|
API_KEY = os.environ.get("PADDLEOCR_API_KEY", "")
|
||||||
|
|
||||||
|
|
||||||
|
def _load_model():
    """Load the PaddleOCR model in a background thread.

    Tries several constructor signatures because accepted kwargs differ
    across PaddleOCR releases (3.x rejects ``show_log``; older 2.8+ builds
    accept it).  On success sets the module-global ``_engine`` and flips
    ``_ready``.  On failure clears ``_loading`` so ``/health`` reports
    ``error`` instead of ``loading`` forever (the original left ``_loading``
    set, making a dead service look permanently "loading").
    """
    global _engine, _ready, _loading
    try:
        logger.info("Importing paddleocr...")
        # Imported lazily: the import itself is slow and may fail on an
        # unsupported platform — keep that off the startup path.
        from paddleocr import PaddleOCR

        logger.info("Import done. Loading PaddleOCR model...")
        # Try multiple init strategies for different PaddleOCR versions.
        inits = [
            # PaddleOCR 3.x (no show_log)
            dict(lang="en", ocr_version="PP-OCRv5", use_angle_cls=True),
            # PaddleOCR 3.x with show_log
            dict(lang="en", ocr_version="PP-OCRv5", use_angle_cls=True, show_log=False),
            # PaddleOCR 2.8+ (latin)
            dict(lang="latin", use_angle_cls=True, show_log=False),
            # PaddleOCR 2.8+ (en, no version)
            dict(lang="en", use_angle_cls=True, show_log=False),
        ]
        for i, kwargs in enumerate(inits):
            try:
                _engine = PaddleOCR(**kwargs)
                logger.info("PaddleOCR init succeeded with strategy %s: %s", i, kwargs)
                break
            except Exception as e:
                logger.info("PaddleOCR init strategy %s failed: %s", i, e)
        else:
            # No strategy produced an engine.
            raise RuntimeError("All PaddleOCR init strategies failed")
        _ready = True
        logger.info("PaddleOCR model loaded successfully — ready to serve")
    except Exception:
        # Mark the loader as finished-and-failed so /health returns "error"
        # rather than "loading"; log with traceback for diagnosis.
        _loading = False
        logger.exception("Failed to load PaddleOCR model")
|
||||||
|
|
||||||
|
|
||||||
|
@app.on_event("startup")
|
||||||
|
def startup_load_model():
|
||||||
|
"""Start model loading in background so health check passes immediately."""
|
||||||
|
global _loading
|
||||||
|
_loading = True
|
||||||
|
thread = threading.Thread(target=_load_model, daemon=True)
|
||||||
|
thread.start()
|
||||||
|
logger.info("Model loading started in background thread")
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/health")
|
||||||
|
def health():
|
||||||
|
if _ready:
|
||||||
|
return {"status": "ok", "model": "PP-OCRv5-latin"}
|
||||||
|
if _loading:
|
||||||
|
return {"status": "loading"}
|
||||||
|
return {"status": "error"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/ocr")
|
||||||
|
async def ocr(
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
x_api_key: str = Header(default=""),
|
||||||
|
):
|
||||||
|
if API_KEY and x_api_key != API_KEY:
|
||||||
|
raise HTTPException(status_code=401, detail="Invalid API key")
|
||||||
|
|
||||||
|
if not _ready:
|
||||||
|
raise HTTPException(status_code=503, detail="Model still loading")
|
||||||
|
|
||||||
|
img_bytes = await file.read()
|
||||||
|
img = Image.open(io.BytesIO(img_bytes)).convert("RGB")
|
||||||
|
img_np = np.array(img)
|
||||||
|
|
||||||
|
result = _engine.ocr(img_np)
|
||||||
|
|
||||||
|
words = []
|
||||||
|
for line in result[0] or []:
|
||||||
|
box, (text, conf) = line[0], line[1]
|
||||||
|
x_min = min(p[0] for p in box)
|
||||||
|
y_min = min(p[1] for p in box)
|
||||||
|
x_max = max(p[0] for p in box)
|
||||||
|
y_max = max(p[1] for p in box)
|
||||||
|
words.append(
|
||||||
|
{
|
||||||
|
"text": text.strip(),
|
||||||
|
"left": int(x_min),
|
||||||
|
"top": int(y_min),
|
||||||
|
"width": int(x_max - x_min),
|
||||||
|
"height": int(y_max - y_min),
|
||||||
|
"conf": round(conf * 100, 1),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"words": words,
|
||||||
|
"image_width": img_np.shape[1],
|
||||||
|
"image_height": img_np.shape[0],
|
||||||
|
}
|
||||||
7
paddleocr-service/requirements.txt
Normal file
7
paddleocr-service/requirements.txt
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
paddlepaddle>=3.0.0
|
||||||
|
paddleocr>=2.9.0
|
||||||
|
fastapi>=0.110.0
|
||||||
|
uvicorn>=0.25.0
|
||||||
|
python-multipart>=0.0.6
|
||||||
|
Pillow>=10.0.0
|
||||||
|
numpy>=1.24.0
|
||||||
Reference in New Issue
Block a user