fix: 4 bugs from IHK Konstanz scan validation

1. DSE-Matcher: Google/YouTube false match — now requires 2+ word match
   for provider-name fallback, not just "Google" matching YouTube section
2. AGB/Widerrufsbelehrung: only_ecommerce flag — skips for non-shop
   websites (detected via payment providers, cart keywords)
3. DSE-internal link following — scanner now discovers links WITHIN the
   privacy policy and scans those too (finds regional DSE sub-pages)
4. Expanded keyword synonyms for DSE mandatory checks:
   - "zwecke" check now also matches headings like "Zweck und Rechtsgrundlage"
   - "behoerdlichen datenschutzbeauftragt" matches DSB
   - "aufsichtsbehörde" with umlaut matches

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-04-29 17:57:19 +02:00
parent 0f3ba9c207
commit fff47cc52e
3 changed files with 70 additions and 7 deletions
@@ -64,8 +64,21 @@ def match_service_to_dse(
)
# Step 2: Search for provider name (e.g., "Google" for "Google Analytics")
# But only if the provider name is specific enough — avoid "Google" matching YouTube
provider = service_name.split()[0] if " " in service_name else service_name
if len(provider) < 4 or provider.lower() in ("the", "a", "an"):
provider = service_name # Too short/generic, use full name
section = find_section_by_content(sections, provider)
# Verify: the section must actually be about THIS service, not just mention the provider
if section and provider.lower() != service_name.lower():
# Check if the full service name or a close variant is in the section
content_lower = section.content.lower()
service_words = service_name.lower().split()
# At least 2 words of the service name must match (not just "Google")
matching_words = sum(1 for w in service_words if w in content_lower)
if matching_words < 2 and service_name.lower() not in content_lower:
section = None # False match — provider name found but wrong context
if section:
original = _extract_relevant_paragraph(section.content, provider)
@@ -51,6 +51,7 @@ MANDATORY_DOCUMENTS = [
"legal_ref": "§305 BGB (bei Vertragsschluss)",
"patterns": [r"agb", r"nutzungsbedingung", r"terms"],
"severity": "MEDIUM",
"only_ecommerce": True, # Nur bei Shops/Buchungsseiten
},
{
"id": "widerruf",
@@ -58,6 +59,7 @@ MANDATORY_DOCUMENTS = [
"legal_ref": "§355 BGB, Art. 246a §1 EGBGB (nur Fernabsatz)",
"patterns": [r"widerruf", r"cancellation.?policy", r"right.?of.?withdrawal"],
"severity": "MEDIUM",
"only_ecommerce": True, # Nur bei Fernabsatzvertraegen
},
]
@@ -78,21 +80,27 @@ MANDATORY_DSE_CONTENT = [
"id": "dsb_kontakt",
"name": "Kontaktdaten des Datenschutzbeauftragten",
"legal_ref": "Art. 13 Abs. 1 lit. b DSGVO",
"keywords": ["datenschutzbeauftragt", "data protection officer", "dsb", "dpo"],
"keywords": ["datenschutzbeauftragt", "data protection officer", "dsb", "dpo",
"behördlichen datenschutz", "behoerdlichen datenschutz",
"datenschutz@", "datenschutzbeauftragter"],
"severity": "HIGH",
},
{
"id": "zwecke",
"name": "Zwecke der Datenverarbeitung",
"legal_ref": "Art. 13 Abs. 1 lit. c DSGVO",
"keywords": ["zweck", "purpose", "verarbeitungszweck"],
"keywords": ["zweck", "purpose", "verarbeitungszweck", "verarbeitungszwecke",
"wozu", "wofuer", "zu welchem zweck", "nutzungszweck",
"zweck und rechtsgrundlage", "zwecke der verarbeitung"],
"severity": "HIGH",
},
{
"id": "rechtsgrundlage",
"name": "Rechtsgrundlagen der Verarbeitung",
"legal_ref": "Art. 13 Abs. 1 lit. c DSGVO",
"keywords": ["rechtsgrundlage", "legal basis", "art. 6", "art.6"],
"keywords": ["rechtsgrundlage", "legal basis", "art. 6", "art.6",
"berechtigtes interesse", "einwilligung", "vertragserfuellung",
"vertragserfüllung", "rechtliche verpflichtung"],
"severity": "HIGH",
},
{
@@ -116,8 +124,9 @@ MANDATORY_DSE_CONTENT = [
"id": "beschwerderecht",
"name": "Beschwerderecht bei Aufsichtsbehoerde",
"legal_ref": "Art. 13 Abs. 2 lit. d DSGVO",
"keywords": ["aufsichtsbehoerde", "beschwerde", "supervisory authority",
"datenschutzbehoerde"],
"keywords": ["aufsichtsbehoerde", "aufsichtsbehörde", "beschwerde",
"supervisory authority", "datenschutzbehoerde",
"landesbeauftragte", "bundesdatenschutz", "bfdi"],
"severity": "MEDIUM",
},
{
@@ -183,13 +192,32 @@ MANDATORY_IMPRESSUM_CONTENT = [
]
ECOMMERCE_INDICATORS = [
r"warenkorb", r"cart", r"shop", r"bestell", r"order",
r"checkout", r"kasse", r"buy", r"kaufen", r"add.?to.?cart",
r"stripe|paypal|klarna|mollie|adyen", # Payment providers
]
def _is_ecommerce(scanned_pages: list[str], html_content: str = "") -> bool:
"""Detect if website is an e-commerce/transactional site."""
all_text = " ".join(scanned_pages).lower() + " " + html_content.lower()
return any(re.search(p, all_text) for p in ECOMMERCE_INDICATORS)
def check_mandatory_documents(
scanned_pages: list[str], page_status: dict[str, int],
html_content: str = "",
) -> list[MandatoryFinding]:
"""Check if mandatory documents/pages exist on the website."""
findings = []
is_shop = _is_ecommerce(scanned_pages, html_content)
for doc in MANDATORY_DOCUMENTS:
# Skip e-commerce-only checks for non-shop websites
if doc.get("only_ecommerce") and not is_shop:
continue
found = False
for page in scanned_pages:
if any(re.search(p, page, re.IGNORECASE) for p in doc["patterns"]):
@@ -58,9 +58,11 @@ AI_TEXT_PATTERNS = [
# (regex, category) pairs used to pull legal links out of page footers.
# Patterns are applied with re.IGNORECASE; group 1 captures the href value.
FOOTER_LINK_PATTERNS = [
    (r'href="([^"]*(?:impressum|imprint|legal-notice)[^"]*)"', "impressum"),
    # "hinweise zum datenschutz"-style URLs contain "datenschutz" themselves,
    # so the plain alternation below already covers them — no extra entry
    # needed (the previous duplicate "datenschutz" pattern is removed).
    (r'href="([^"]*(?:datenschutz|privacy|dsgvo)[^"]*)"', "datenschutz"),
    (r'href="([^"]*(?:agb|terms|nutzungsbedingungen)[^"]*)"', "agb"),
    (r'href="([^"]*(?:cookie)[^"]*)"', "cookies"),
    # Also find deep DSE links (regional pages, sub-pages).
    # "datenschutzerkl" is a prefix of "datenschutzerklaerung", so a single
    # alternative matches both spellings.
    (r'href="([^"]*(?:datenschutzinformation|datenschutzerkl)[^"]*)"', "datenschutz_deep"),
]
@@ -97,11 +99,31 @@ async def scan_website(base_url: str) -> ScanResult:
_detect_services(start_html, origin, result)
_detect_ai_mentions(start_html, origin, result)
# Process other pages
# Process other pages + discover DSE-internal links
dse_internal_urls = set()
for url, html in zip(other_urls, other_htmls):
if isinstance(html, str) and html:
_detect_services(html, url, result)
_detect_ai_mentions(html, url, result)
# If this is a DSE page, find links within it
if re.search(r"datenschutz|privacy|dsgvo", url, re.IGNORECASE):
for pattern, _ in FOOTER_LINK_PATTERNS:
for match in re.finditer(pattern, html, re.IGNORECASE):
href = match.group(1)
if href.startswith("/"):
href = urljoin(origin, href)
if href.startswith("http") and href not in page_urls:
dse_internal_urls.add(href)
# 4. Follow DSE-internal links (additional pages linked from privacy policy)
if dse_internal_urls:
extra_urls = [u for u in list(dse_internal_urls)[:5] if u not in page_urls]
if extra_urls:
extra_tasks = [_fetch_page(client, u, result) for u in extra_urls]
extra_htmls = await asyncio.gather(*extra_tasks, return_exceptions=True)
for url, html in zip(extra_urls, extra_htmls):
if isinstance(html, str) and html:
_detect_services(html, url, result)
# Deduplicate services
seen = set()