cb607bf228
Fundamental fix: scans now run asynchronously with progress polling.
Backend:
- POST /scan starts background task, returns scan_id immediately
- GET /scan/{scan_id} returns status + progress + result when done
- 7 progress steps shown: Website scan, DSI discovery, DSE analysis,
SOLL/IST comparison, corrections, report, email
- In-memory job store (dict with scan_id → status/result)
- No timeout limits on scan duration
Frontend:
- POST starts scan, receives scan_id
- Polls GET every 5 seconds (max 120 attempts = 10 min)
- Shows live progress message during scan
- Displays result when completed, error when failed
Proxy:
- POST timeout reduced to 30s (just starts the job)
- GET timeout 10s (just status check)
- No more 504/connection-dropped errors
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
148 lines
5.9 KiB
Python
148 lines
5.9 KiB
Python
"""
|
|
Agent scan helpers — summary builder and correction generator.
|
|
Extracted from agent_scan_routes.py to keep route file under 500 LOC.
|
|
"""
|
|
|
|
import logging
|
|
import os
|
|
import re
|
|
|
|
import httpx
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
async def add_corrections(findings: list, dse_text: str) -> None:
    """Add LLM-generated correction suggestions for pre-launch mode.

    For every HIGH/MEDIUM finding whose code marks a missing DSE entry
    (``DSE-MISSING-*``), asks the configured Ollama model for a
    ready-to-paste German privacy-policy paragraph and stores it on
    ``finding.correction``. Best effort: failures for one service are
    logged and the loop continues; the function never raises.

    Args:
        findings: Finding objects exposing ``severity`` and ``code``;
            matched findings gain a ``correction`` attribute in place.
        dse_text: Current privacy-policy text. Currently unused here;
            kept for interface stability with the caller.

    Returns:
        None — findings are mutated in place.
    """
    # Hoist loop-invariant configuration out of the per-finding loop.
    ollama_url = os.environ.get("OLLAMA_URL", "http://host.docker.internal:11434")
    ollama_model = os.environ.get("OLLAMA_MODEL", "qwen3.5:35b-a3b")
    for finding in findings:
        # Guard clause: only high-priority "missing service" findings
        # get a generated correction.
        if finding.severity not in ("HIGH", "MEDIUM") or "MISSING" not in finding.code:
            continue
        service_name = finding.code.replace("DSE-MISSING-", "").replace("_", " ").title()
        try:
            # Generous timeout: local LLM generation can be slow.
            async with httpx.AsyncClient(timeout=120.0) as client:
                resp = await client.post(f"{ollama_url}/api/generate", json={
                    "model": ollama_model,
                    "prompt": (
                        f"Erstelle einen einbaufertigen Textbaustein fuer eine deutsche "
                        f"Datenschutzerklaerung fuer den Dienst '{service_name}'. "
                        f"Enthalte: Ueberschrift, Anbietername mit Sitz, Zweck der Verarbeitung, "
                        f"Rechtsgrundlage nach DSGVO, Drittlandtransfer-Hinweis wenn noetig, "
                        f"Widerspruchsmoeglichkeit. Max 150 Woerter. "
                        f"Antworte NUR mit dem fertigen Textbaustein."
                    ),
                    "stream": False,
                })
                # Fail loudly into the except/log path instead of silently
                # parsing an HTTP error body as if it were a model answer.
                resp.raise_for_status()
                data = resp.json()
                raw = data.get("response", "").strip()
                # Strip chain-of-thought blocks some models emit.
                raw = re.sub(r"<think>.*?</think>", "", raw, flags=re.DOTALL).strip()
                # Ignore empty or trivially short answers.
                if raw and len(raw) > 50:
                    finding.correction = raw
        except Exception as e:
            # Best effort: one failing service must not abort the others.
            logger.warning("Correction generation failed for %s: %s", service_name, e)
|
|
|
|
|
|
def build_scan_summary(
|
|
url: str, scan, comparison: dict, findings: list, is_live: bool,
|
|
discovered_docs: list | None = None,
|
|
) -> str:
|
|
"""Build German scan summary including DSI document results."""
|
|
mode = "PRUEFUNG LIVE-WEBSITE" if is_live else "INTERNE PRUEFUNG"
|
|
n_undoc = len(comparison["undocumented"])
|
|
n_ok = len(comparison["documented"])
|
|
n_outdated = len(comparison["outdated"])
|
|
n_findings = len(findings)
|
|
high = sum(1 for f in findings if f.severity == "HIGH")
|
|
|
|
parts = [
|
|
f"{mode} — Website-Scan",
|
|
f"URL: {url}",
|
|
f"Seiten gescannt: {len(scan.pages_scanned)}",
|
|
]
|
|
for page in scan.pages_scanned:
|
|
status = scan.missing_pages.get(page, 200)
|
|
marker = "\u2717" if status >= 400 else "\u2713"
|
|
parts.append(f" {marker} {page}" + (f" (HTTP {status})" if status >= 400 else ""))
|
|
parts.extend([
|
|
"",
|
|
"Dienstleister-Abgleich (DSE vs. Website):",
|
|
f" Korrekt dokumentiert: {n_ok}",
|
|
f" NICHT in DSE (Verstoss): {n_undoc}",
|
|
f" Veraltet in DSE: {n_outdated}",
|
|
"",
|
|
f"Findings: {n_findings} ({high} mit hoher Prioritaet)",
|
|
])
|
|
|
|
# DSI Documents section
|
|
if discovered_docs:
|
|
parts.extend([
|
|
"",
|
|
f"Rechtliche Dokumente gefunden: {len(discovered_docs)}",
|
|
])
|
|
for doc in discovered_docs:
|
|
pct = doc.completeness_pct if hasattr(doc, 'completeness_pct') else 0
|
|
fc = doc.findings_count if hasattr(doc, 'findings_count') else 0
|
|
wc = doc.word_count if hasattr(doc, 'word_count') else 0
|
|
status = "OK" if pct >= 80 else "LUECKENHAFT" if pct >= 50 else "MANGELHAFT"
|
|
dt = doc.doc_type if hasattr(doc, 'doc_type') else "unknown"
|
|
title = doc.title if hasattr(doc, 'title') else "?"
|
|
parts.append(
|
|
f" [{status}] {title} ({dt}, {wc} Woerter, "
|
|
f"{pct}% vollstaendig, {fc} Maengel)"
|
|
)
|
|
|
|
if findings:
|
|
parts.append("")
|
|
for f in findings[:20]:
|
|
sev = f.severity if hasattr(f, 'severity') else "?"
|
|
txt = f.text if hasattr(f, 'text') else str(f)
|
|
marker = "!!" if sev == "HIGH" else "!" if sev == "MEDIUM" else "i"
|
|
parts.append(f" [{marker}] {txt}")
|
|
|
|
if is_live and high > 0:
|
|
parts.extend([
|
|
"",
|
|
"ACHTUNG: Verstoesse auf einer bereits veroeffentlichten Website. "
|
|
"Sofortige Korrektur empfohlen.",
|
|
])
|
|
|
|
return "\n".join(parts)
|
|
|
|
|
|
async def fetch_dse_text(url: str, scanned_pages: list[str]) -> str:
    """Find and fetch the privacy policy page text.

    Picks the first scanned page whose URL looks like a privacy policy
    (datenschutz/privacy/dsgvo), falling back to the site root, fetches
    it, strips markup, and returns at most 8000 characters of plain
    text. Any failure degrades to an empty string.
    """
    # Prefer a page that looks like a privacy policy; otherwise use the root.
    target = next(
        (p for p in scanned_pages if re.search(r"datenschutz|privacy|dsgvo", p, re.IGNORECASE)),
        url,
    )
    try:
        async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
            resp = await client.get(target, headers={"User-Agent": "BreakPilot-Compliance-Agent/1.0"})
            markup = resp.text
            # Remove script/style blocks first, then every remaining tag,
            # then collapse runs of whitespace into single spaces.
            markup = re.sub(r"<(script|style)[^>]*>.*?</\1>", "", markup, flags=re.DOTALL | re.IGNORECASE)
            markup = re.sub(r"<[^>]+>", " ", markup)
            text = re.sub(r"\s+", " ", markup).strip()
            # Cap length so downstream consumers stay bounded.
            return text[:8000]
    except Exception:
        # Best effort: an unreachable page simply yields empty text.
        return ""
|
|
|
|
|
|
async def fetch_dse_html(url: str, scanned_pages: list[str]) -> str:
    """Fetch the raw HTML of the privacy policy page.

    Selects the first scanned page whose URL matches the privacy-policy
    pattern (datenschutz/privacy/dsgvo), falling back to the site root,
    and returns its raw response body. Any failure yields "".
    """
    pattern = re.compile(r"datenschutz|privacy|dsgvo", re.IGNORECASE)
    # Default to the root URL; override with the first matching page.
    target = url
    for candidate in scanned_pages:
        if pattern.search(candidate):
            target = candidate
            break
    try:
        async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
            resp = await client.get(target, headers={"User-Agent": "BreakPilot-Compliance-Agent/1.0"})
            return resp.text
    except Exception:
        # Unreachable page degrades to empty HTML rather than raising.
        return ""
|