feat: Add DevSecOps tools, Woodpecker proxy, Vault persistent storage, pitch-deck annex slides
All checks were successful
CI / test-bqas (push) Successful in 32s
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-consent (push) Successful in 46s
CI / test-python-voice (push) Successful in 38s

- Install Gitleaks, Trivy, Grype, Syft, Semgrep, Bandit in backend-core Dockerfile
- Add Woodpecker SQLite proxy API (fallback without API token)
- Mount woodpecker_data volume read-only to backend-core
- Add backend proxy fallback in admin-core Woodpecker route
- Add Vault file-based persistent storage (config.hcl, init-vault.sh)
- Auto-init, unseal and root-token persistence for Vault
- Add 6 pitch-deck annex slides (Assumptions, Architecture, GTM, Regulatory, Engineering, AI Pipeline)
- Dynamic margin/amortization KPIs in BusinessModelSlide
- Market sources modal with citations in MarketSlide
- Redesign nginx landing page to 3-column layout (Lehrer/Compliance/Core)
- Extend MkDocs nav with Services and SDK documentation sections
- Add SDK Protection architecture doc

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Boenisch
2026-02-17 15:42:43 +01:00
parent eb43b40dd0
commit b7d21daa24
31 changed files with 3323 additions and 299 deletions

View File

@@ -18,7 +18,8 @@ COPY requirements.txt .
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -r requirements.txt
pip install --no-cache-dir -r requirements.txt && \
pip install --no-cache-dir semgrep bandit
# ---------- Runtime stage ----------
FROM python:3.12-slim-bookworm
@@ -38,8 +39,26 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libgl1 \
libglib2.0-0 \
curl \
git \
&& rm -rf /var/lib/apt/lists/*
# Install DevSecOps tools (gitleaks, trivy, grype, syft)
ARG TARGETARCH=arm64
RUN set -eux; \
# Gitleaks
GITLEAKS_VERSION=8.21.2; \
if [ "$TARGETARCH" = "arm64" ]; then GITLEAKS_ARCH=arm64; else GITLEAKS_ARCH=x64; fi; \
curl -sSfL "https://github.com/gitleaks/gitleaks/releases/download/v${GITLEAKS_VERSION}/gitleaks_${GITLEAKS_VERSION}_linux_${GITLEAKS_ARCH}.tar.gz" \
| tar xz -C /usr/local/bin gitleaks; \
# Trivy
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin; \
# Grype
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin; \
# Syft
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin; \
# Verify
gitleaks version && trivy --version && grype version && syft version
# Copy virtualenv from builder
COPY --from=builder /opt/venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

View File

@@ -25,6 +25,7 @@ from email_template_api import (
)
from system_api import router as system_router
from security_api import router as security_router
from woodpecker_proxy_api import router as woodpecker_router
# ---------------------------------------------------------------------------
# Middleware imports
@@ -105,6 +106,7 @@ app.include_router(system_router) # already has paths defined in r
# Security / DevSecOps dashboard
app.include_router(security_router, prefix="/api")
app.include_router(woodpecker_router, prefix="/api")
# ---------------------------------------------------------------------------
# Startup / Shutdown events

View File

@@ -34,6 +34,9 @@ BACKEND_DIR = Path(__file__).parent
REPORTS_DIR = BACKEND_DIR / "security-reports"
SCRIPTS_DIR = BACKEND_DIR / "scripts"
# Projekt-Root fuer Security-Scans
PROJECT_ROOT = BACKEND_DIR
# Sicherstellen, dass das Reports-Verzeichnis existiert
try:
REPORTS_DIR.mkdir(exist_ok=True)

View File

@@ -0,0 +1,133 @@
"""
Woodpecker CI Proxy API
Liest Pipeline-Daten direkt aus der Woodpecker SQLite-Datenbank.
Wird als Fallback verwendet, wenn kein WOODPECKER_TOKEN konfiguriert ist.
"""
import sqlite3
from pathlib import Path
from datetime import datetime
from fastapi import APIRouter, Query
# All routes live under /v1/woodpecker; main.py adds the outer /api prefix.
router = APIRouter(prefix="/v1/woodpecker", tags=["Woodpecker CI"])

# SQLite DB inside the woodpecker_data volume, mounted read-only into this container.
WOODPECKER_DB = Path("/woodpecker-data/woodpecker.sqlite")
def get_db():
    """Open a read-only connection to the Woodpecker SQLite database.

    Returns:
        sqlite3.Connection with Row factory, or None when the database is
        unavailable (file missing, unreadable, or not a valid SQLite file).
        Callers treat None as "Woodpecker offline", so connection failures
        degrade gracefully instead of surfacing as HTTP 500s.
    """
    if not WOODPECKER_DB.exists():
        return None
    try:
        conn = sqlite3.connect(f"file:{WOODPECKER_DB}?mode=ro", uri=True)
    except sqlite3.Error:
        # e.g. permission problem or corrupt file — same as "not available"
        return None
    conn.row_factory = sqlite3.Row
    return conn
@router.get("/status")
async def get_status():
    """Report overall Woodpecker state: repo list plus aggregate pipeline stats.

    Returns an "offline" payload when the SQLite database is not reachable.
    """
    conn = get_db()
    if conn is None:
        return {"status": "offline", "error": "Woodpecker DB nicht gefunden"}
    try:
        def scalar(sql):
            # Run a single-value aggregate query and unwrap the result.
            return conn.execute(sql).fetchone()[0]

        repo_rows = conn.execute(
            "SELECT id, name, full_name, active FROM repos ORDER BY id"
        ).fetchall()

        total = scalar("SELECT COUNT(*) FROM pipelines")
        ok = scalar("SELECT COUNT(*) FROM pipelines WHERE status='success'")
        failed = scalar("SELECT COUNT(*) FROM pipelines WHERE status='failure'")
        newest = scalar("SELECT MAX(created) FROM pipelines")

        return {
            "status": "online",
            "repos": [dict(row) for row in repo_rows],
            "stats": {
                "total_pipelines": total,
                "success": ok,
                "failure": failed,
                # Percentage, one decimal place; 0 when no pipelines exist yet.
                "success_rate": round(ok / total * 100, 1) if total > 0 else 0,
            },
            # 'created' is a unix timestamp; MAX() is None on an empty table.
            "last_activity": datetime.fromtimestamp(newest).isoformat() if newest else None,
        }
    finally:
        conn.close()
@router.get("/pipelines")
async def get_pipelines(
    repo: int = Query(default=0, description="Repo ID (0 = alle)"),
    limit: int = Query(default=10, ge=1, le=100),
):
    """Return the newest pipelines (optionally for one repo), each with its steps.

    Args:
        repo: repo id to filter by; 0 (or any non-positive value) means all repos.
        limit: maximum number of pipelines to return (1..100).
    """
    conn = get_db()
    if conn is None:
        return {"status": "offline", "pipelines": [], "lastUpdate": datetime.now().isoformat()}
    try:
        # "commit" is a reserved word in SQLite, hence the quoted column name.
        base_sql = """SELECT p.id, p.repo_id, p.number, p.status, p.event, p.branch,
                      p."commit", p.message, p.author, p.created, p.started, p.finished,
                      r.name as repo_name
               FROM pipelines p
               JOIN repos r ON r.id = p.repo_id"""
        if repo > 0:
            sql = base_sql + " WHERE p.repo_id = ? ORDER BY p.id DESC LIMIT ?"
            params = (repo, limit)
        else:
            sql = base_sql + " ORDER BY p.id DESC LIMIT ?"
            params = (limit,)
        rows = conn.execute(sql, params).fetchall()

        step_sql = """SELECT s.name, s.state, s.exit_code, s.error
                   FROM steps s
                   WHERE s.pipeline_id = ?
                   ORDER BY s.pid"""
        pipelines = []
        for row in rows:
            entry = dict(row)
            # steps.pipeline_id references pipelines.id directly (no joins needed).
            entry["steps"] = [
                dict(step) for step in conn.execute(step_sql, (entry["id"],)).fetchall()
            ]
            # Short SHA for display; tolerate NULL commit values.
            entry["commit"] = (entry.get("commit") or "")[:7]
            # First line of the commit message, capped at 100 chars.
            entry["message"] = (entry.get("message") or "").split("\n")[0][:100]
            pipelines.append(entry)

        return {
            "status": "online",
            "pipelines": pipelines,
            "lastUpdate": datetime.now().isoformat(),
        }
    finally:
        conn.close()
@router.get("/repos")
async def get_repos():
    """List all repos, each annotated with its most recent pipeline's status/time.

    Returns an empty list when the Woodpecker database is not reachable.
    """
    conn = get_db()
    if conn is None:
        return []
    try:
        result = []
        repo_rows = conn.execute(
            "SELECT id, name, full_name, active FROM repos ORDER BY id"
        ).fetchall()
        for row in repo_rows:
            entry = dict(row)
            newest = conn.execute(
                "SELECT status, created FROM pipelines WHERE repo_id = ? ORDER BY id DESC LIMIT 1",
                (entry["id"],),
            ).fetchone()
            # Repos without any pipeline yet simply lack the last_* keys.
            if newest is not None:
                entry["last_status"] = newest["status"]
                entry["last_activity"] = datetime.fromtimestamp(newest["created"]).isoformat()
            result.append(entry)
        return result
    finally:
        conn.close()