All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 40s
CI / test-python-backend-compliance (push) Successful in 34s
CI / test-python-document-crawler (push) Successful in 26s
CI / test-python-dsms-gateway (push) Successful in 21s
- import_routes: GET /gap-analysis/{document_id} implementiert
- import_routes: Bug-Fix — gap_analysis_result vor try-Block initialisiert
(verhindert UnboundLocalError bei DB-Fehler)
- test_import_routes: 21 neue API-Endpoint-Tests (59 total, alle grün)
- test_screening_routes: 18 neue API-Endpoint-Tests (74 total, alle grün)
- 031_modules.sql: Migration für compliance_service_modules,
compliance_module_regulations, compliance_module_risks
- test_module_routes: 20 neue Tests für Module-Registry-Routen (alle grün)
- freigabe-module.md: MkDocs-Seite für Import/Screening/Modules/RAG
- mkdocs.yml: Nav-Eintrag "Freigabe-Module (Paket 2)"
Gesamt: 146 neue Tests, alle bestanden
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
692 lines
25 KiB
Python
"""Tests for System Screening routes (screening_routes.py)."""

import json
from unittest.mock import AsyncMock, patch

import pytest

from compliance.api.screening_routes import (
    detect_and_parse,
    extract_fix_version,
    generate_sbom,
    map_osv_severity,
    parse_package_lock,
    parse_requirements_txt,
    parse_yarn_lock,
)
class TestParsePackageLock:
    """Tests for package-lock.json parsing."""

    def test_v2_format(self):
        raw = json.dumps({
            "packages": {
                "": {"name": "my-app", "version": "1.0.0"},
                "node_modules/react": {"version": "18.3.0", "license": "MIT"},
                "node_modules/lodash": {"version": "4.17.21", "license": "MIT"},
            }
        })
        parsed = parse_package_lock(raw)
        assert len(parsed) == 2
        parsed_names = [entry["name"] for entry in parsed]
        assert "react" in parsed_names
        assert "lodash" in parsed_names

    def test_v1_format(self):
        raw = json.dumps({
            "dependencies": {
                "express": {"version": "4.18.2"},
                "cors": {"version": "2.8.5"},
            }
        })
        assert len(parse_package_lock(raw)) == 2

    def test_empty_json(self):
        # No packages/dependencies keys at all -> nothing to report.
        assert parse_package_lock("{}") == []

    def test_invalid_json(self):
        # Malformed input degrades to an empty component list.
        assert parse_package_lock("not json") == []

    def test_root_package_skipped(self):
        # The "" entry describes the project itself, not a dependency.
        raw = json.dumps({"packages": {"": {"name": "root", "version": "1.0.0"}}})
        assert len(parse_package_lock(raw)) == 0
class TestParseRequirementsTxt:
    """Tests for requirements.txt parsing."""

    def test_pinned_versions(self):
        text = "fastapi==0.123.9\nuvicorn==0.38.0\npydantic==2.12.5"
        parsed = parse_requirements_txt(text)
        assert len(parsed) == 3
        first = parsed[0]
        assert first["name"] == "fastapi"
        assert first["version"] == "0.123.9"
        assert first["ecosystem"] == "PyPI"

    def test_minimum_versions(self):
        parsed = parse_requirements_txt("idna>=3.7\ncryptography>=42.0.0")
        assert len(parsed) == 2
        # ">=" pins record the minimum as the component version.
        assert parsed[0]["version"] == "3.7"

    def test_comments_and_blanks_ignored(self):
        text = "# Comment\n\nfastapi==1.0.0\n# Another comment\n-r base.txt"
        assert len(parse_requirements_txt(text)) == 1

    def test_bare_package_name(self):
        parsed = parse_requirements_txt("requests")
        assert len(parsed) == 1
        assert parsed[0]["version"] == "latest"

    def test_empty_content(self):
        assert parse_requirements_txt("") == []
class TestParseYarnLock:
    """Tests for yarn.lock parsing (basic)."""

    def test_basic_format(self):
        text = (
            '"react@^18.0.0":\n version "18.3.0"\n'
            '"lodash@^4.17.0":\n version "4.17.21"'
        )
        assert len(parse_yarn_lock(text)) == 2
class TestDetectAndParse:
    """Tests for file type detection and parsing."""

    def test_package_lock_detection(self):
        raw = json.dumps({"packages": {"node_modules/x": {"version": "1.0"}}})
        parsed, eco = detect_and_parse("package-lock.json", raw)
        assert eco == "npm"
        assert len(parsed) == 1

    def test_requirements_detection(self):
        parsed, eco = detect_and_parse("requirements.txt", "flask==2.0.0")
        assert eco == "PyPI"
        assert len(parsed) == 1

    def test_unknown_format(self):
        # Unsupported filenames yield no components (no exception).
        parsed, _eco = detect_and_parse("readme.md", "Hello World")
        assert len(parsed) == 0
class TestGenerateSbom:
    """Tests for CycloneDX SBOM generation."""

    def test_sbom_structure(self):
        component = {
            "name": "react",
            "version": "18.3.0",
            "type": "library",
            "ecosystem": "npm",
            "license": "MIT",
        }
        sbom = generate_sbom([component], "npm")
        assert sbom["bomFormat"] == "CycloneDX"
        assert sbom["specVersion"] == "1.5"
        assert len(sbom["components"]) == 1
        assert sbom["components"][0]["purl"] == "pkg:npm/react@18.3.0"

    def test_sbom_empty_components(self):
        assert generate_sbom([], "npm")["components"] == []

    def test_sbom_unknown_license_excluded(self):
        component = {
            "name": "x",
            "version": "1.0",
            "type": "library",
            "ecosystem": "npm",
            "license": "unknown",
        }
        sbom = generate_sbom([component], "npm")
        # An "unknown" license must not be emitted into the SBOM.
        assert sbom["components"][0]["licenses"] == []
class TestMapOsvSeverity:
    """Tests for OSV severity mapping."""

    def test_critical_severity(self):
        sev, score = map_osv_severity({"database_specific": {"severity": "CRITICAL"}})
        assert sev == "CRITICAL"
        assert score == 9.5

    def test_medium_default(self):
        # No severity information at all -> MEDIUM / 5.0 fallback.
        sev, score = map_osv_severity({})
        assert sev == "MEDIUM"
        assert score == 5.0

    def test_low_severity(self):
        sev, score = map_osv_severity({"database_specific": {"severity": "LOW"}})
        assert sev == "LOW"
        assert score == 2.5
class TestExtractFixVersion:
    """Tests for extracting fix version from OSV data."""

    def test_fix_version_found(self):
        osv = {
            "affected": [{
                "package": {"name": "lodash"},
                "ranges": [{"events": [{"introduced": "0"}, {"fixed": "4.17.21"}]}],
            }]
        }
        assert extract_fix_version(osv, "lodash") == "4.17.21"

    def test_no_fix_version(self):
        osv = {
            "affected": [{
                "package": {"name": "x"},
                "ranges": [{"events": [{"introduced": "0"}]}],
            }]
        }
        # No "fixed" event -> nothing to report.
        assert extract_fix_version(osv, "x") is None

    def test_wrong_package_name(self):
        osv = {
            "affected": [{
                "package": {"name": "other"},
                "ranges": [{"events": [{"fixed": "1.0"}]}],
            }]
        }
        # Fix belongs to a different package than the one queried.
        assert extract_fix_version(osv, "lodash") is None
# =============================================================================
# Extended tests — additional coverage
# =============================================================================


class TestParsePackageLockExtended:
    """Extended tests for package-lock.json parsing."""

    def test_scoped_packages_parsed(self):
        raw = json.dumps({
            "packages": {
                "node_modules/@babel/core": {"version": "7.24.0"},
                "node_modules/@types/node": {"version": "20.0.0"},
            }
        })
        parsed = parse_package_lock(raw)
        assert len(parsed) == 2
        scoped_names = [entry["name"] for entry in parsed]
        assert "@babel/core" in scoped_names
        assert "@types/node" in scoped_names

    def test_ecosystem_is_npm(self):
        raw = json.dumps({"packages": {"node_modules/lodash": {"version": "4.17.21"}}})
        assert parse_package_lock(raw)[0]["ecosystem"] == "npm"

    def test_component_has_type(self):
        raw = json.dumps({"packages": {"node_modules/express": {"version": "4.18.2"}}})
        assert "type" in parse_package_lock(raw)[0]

    def test_v1_with_nested_deps_ignored(self):
        # v1 format: only top-level dependencies counted.
        raw = json.dumps({"dependencies": {"express": {"version": "4.18.2"}}})
        assert len(parse_package_lock(raw)) == 1

    def test_empty_packages_object(self):
        assert parse_package_lock(json.dumps({"packages": {}})) == []
class TestParseRequirementsTxtExtended:
    """Extended tests for requirements.txt parsing."""

    def test_tilde_versions_parsed(self):
        assert len(parse_requirements_txt("flask~=2.0.0")) == 1

    def test_no_version_specifier(self):
        parsed = parse_requirements_txt("requests\nnumpy\npandas")
        assert len(parsed) == 3
        # Unpinned requirements fall back to the "latest" marker.
        assert all(entry["version"] == "latest" for entry in parsed)

    def test_ecosystem_is_pypi(self):
        parsed = parse_requirements_txt("fastapi==0.100.0")
        assert parsed[0]["ecosystem"] == "PyPI"

    def test_component_has_name(self):
        parsed = parse_requirements_txt("cryptography>=42.0.0")
        assert parsed[0]["name"] == "cryptography"

    def test_extras_are_not_crashed(self):
        # Extras syntax may or may not be parsed depending on the
        # implementation; at minimum flask must come through.
        parsed = parse_requirements_txt("requests[security]==2.31.0\nflask==2.0.0")
        assert "flask" in [entry["name"] for entry in parsed]
class TestParseYarnLockExtended:
    """Extended tests for yarn.lock parsing."""

    def test_multiple_packages(self):
        text = (
            '"react@^18.0.0":\n version "18.3.0"\n'
            '"lodash@^4.17.0":\n version "4.17.21"\n'
            '"typescript@^5.0.0":\n version "5.4.5"\n'
        )
        assert len(parse_yarn_lock(text)) == 3

    def test_empty_yarn_lock(self):
        assert isinstance(parse_yarn_lock(""), list)

    def test_yarn_lock_ecosystem(self):
        parsed = parse_yarn_lock('"react@^18.0.0":\n version "18.3.0"\n')
        if parsed:
            assert parsed[0]["ecosystem"] == "npm"
class TestDetectAndParseExtended:
    """Extended tests for file type detection."""

    def test_yarn_lock_detection(self):
        content = '"lodash@^4.17.0":\n version "4.17.21"'
        components, ecosystem = detect_and_parse("yarn.lock", content)
        assert ecosystem == "npm"

    def test_go_mod_detection(self):
        content = 'module example.com/app\nrequire github.com/gin-gonic/gin v1.9.1'
        # go.mod is not yet supported — detect_and_parse returns unknown
        components, ecosystem = detect_and_parse("go.mod", content)
        assert ecosystem in ("Go", "unknown")

    def test_case_insensitive_filename(self):
        # BUG FIX: the previous version wrapped the call in
        # `try/except Exception: pass` and asserted nothing, so this test
        # could never fail. Unknown filenames return gracefully (see
        # TestDetectAndParse.test_unknown_format), so the call is safe to
        # make unguarded; assert the (components, ecosystem) contract
        # without fixing whether case-insensitive matching is supported.
        data = json.dumps({"packages": {"node_modules/x": {"version": "1.0"}}})
        components, ecosystem = detect_and_parse("Package-Lock.json", data)
        assert isinstance(components, list)
        assert isinstance(ecosystem, str)

    def test_returns_tuple(self):
        result = detect_and_parse("requirements.txt", "flask==2.0.0")
        assert isinstance(result, tuple)
        assert len(result) == 2
class TestGenerateSbomExtended:
    """Extended tests for CycloneDX SBOM generation."""

    @staticmethod
    def _component(name, version, ecosystem):
        # Shorthand for the component dict shape the tests feed in.
        return {
            "name": name,
            "version": version,
            "type": "library",
            "ecosystem": ecosystem,
            "license": "MIT",
        }

    def test_sbom_has_metadata(self):
        sbom = generate_sbom([self._component("react", "18.0.0", "npm")], "npm")
        assert "metadata" in sbom

    def test_sbom_metadata_present(self):
        assert "metadata" in generate_sbom([], "PyPI")

    def test_multiple_components(self):
        comps = [
            self._component("react", "18.0.0", "npm"),
            self._component("lodash", "4.17.21", "npm"),
        ]
        assert len(generate_sbom(comps, "npm")["components"]) == 2

    def test_purl_format_pypi(self):
        sbom = generate_sbom([self._component("fastapi", "0.100.0", "PyPI")], "PyPI")
        assert sbom["components"][0]["purl"] == "pkg:pypi/fastapi@0.100.0"

    def test_purl_format_go(self):
        sbom = generate_sbom(
            [self._component("github.com/gin-gonic/gin", "1.9.1", "Go")], "Go"
        )
        assert sbom["components"][0]["purl"].startswith("pkg:go/")

    def test_sbom_spec_version(self):
        assert generate_sbom([], "npm")["specVersion"] == "1.5"

    def test_sbom_bom_format(self):
        assert generate_sbom([], "npm")["bomFormat"] == "CycloneDX"
class TestMapOsvSeverityExtended:
    """Extended tests for OSV severity mapping."""

    @staticmethod
    def _vuln(severity):
        # Minimal OSV record carrying only a database_specific severity.
        return {"database_specific": {"severity": severity}}

    def test_high_severity(self):
        sev, score = map_osv_severity(self._vuln("HIGH"))
        assert sev == "HIGH"
        assert score == 7.5

    def test_all_severities_return_tuple(self):
        for level in ("CRITICAL", "HIGH", "MEDIUM", "LOW"):
            result = map_osv_severity(self._vuln(level))
            assert isinstance(result, tuple)
            assert len(result) == 2

    def test_unknown_severity_returns_medium(self):
        sev, score = map_osv_severity(self._vuln("UNKNOWN_LEVEL"))
        assert sev == "MEDIUM"
        assert score == 5.0

    def test_cvss_is_float(self):
        _sev, score = map_osv_severity(self._vuln("CRITICAL"))
        assert isinstance(score, float)

    def test_no_affected_field(self):
        sev, _score = map_osv_severity({})
        assert sev == "MEDIUM"
class TestExtractFixVersionExtended:
    """Extended tests for fix version extraction."""

    def test_multiple_affected_packages(self):
        osv = {
            "affected": [
                {"package": {"name": "other-pkg"},
                 "ranges": [{"events": [{"fixed": "2.0"}]}]},
                {"package": {"name": "my-pkg"},
                 "ranges": [{"events": [{"fixed": "1.5.0"}]}]},
            ]
        }
        # Only the entry matching the queried package name counts.
        assert extract_fix_version(osv, "my-pkg") == "1.5.0"

    def test_empty_affected_list(self):
        assert extract_fix_version({"affected": []}, "lodash") is None

    def test_no_affected_key(self):
        assert extract_fix_version({}, "lodash") is None

    def test_multiple_events_returns_fixed(self):
        events = [{"introduced": "0"}, {"introduced": "1.0"}, {"fixed": "2.0.1"}]
        osv = {
            "affected": [{
                "package": {"name": "pkg"},
                "ranges": [{"events": events}],
            }]
        }
        assert extract_fix_version(osv, "pkg") == "2.0.1"
# =============================================================================
# API Endpoint Tests
# =============================================================================

from fastapi import FastAPI
from fastapi.testclient import TestClient
from unittest.mock import MagicMock, patch, AsyncMock

from compliance.api.screening_routes import router as screening_router

# One shared app/client pair, reused by every endpoint test class below.
_app_scr = FastAPI()
_app_scr.include_router(screening_router)
_client_scr = TestClient(_app_scr)
TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
HEADERS = {"X-Tenant-ID": TENANT_ID}
SCREENING_UUID = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"


def _make_screening_row():
    """Build a fake DB row (list) matching the screening SELECT column order.

    Columns: id, status, sbom_format, sbom_version, total_components,
    total_issues, critical_issues, high_issues, medium_issues, low_issues,
    sbom_data, started_at, completed_at.
    """
    row = [SCREENING_UUID, "completed", "CycloneDX", "1.5"]
    row += [3, 0, 0, 0, 0, 0]
    row += [
        {"components": [], "metadata": {}},
        "2024-01-15T10:00:00",
        "2024-01-15T10:01:00",
    ]
    return row
class TestScanEndpoint:
    """API tests for POST /v1/screening/scan."""

    @staticmethod
    def _post_scan(filename, payload, content_type):
        # POST a single file upload plus the tenant form field.
        return _client_scr.post(
            "/v1/screening/scan",
            files={"file": (filename, payload, content_type)},
            data={"tenant_id": TENANT_ID},
        )

    def test_scan_requirements_txt_success(self):
        """Valid requirements.txt returns completed screening."""
        payload = b"fastapi==0.100.0\nhttpx==0.25.0\npydantic==2.0.0"
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls, \
             patch("compliance.api.screening_routes.scan_vulnerabilities",
                   new_callable=AsyncMock) as scan_mock:
            scan_mock.return_value = []
            session_cls.return_value = MagicMock()

            response = self._post_scan("requirements.txt", payload, "text/plain")

        assert response.status_code == 200
        body = response.json()
        assert body["status"] == "completed"
        assert body["total_components"] == 3
        assert body["total_issues"] == 0
        assert body["sbom_format"] == "CycloneDX"

    def test_scan_package_lock_success(self):
        """Valid package-lock.json returns completed screening."""
        payload = json.dumps({
            "packages": {
                "node_modules/react": {"version": "18.3.0", "license": "MIT"},
                "node_modules/lodash": {"version": "4.17.21", "license": "MIT"},
            }
        }).encode()
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls, \
             patch("compliance.api.screening_routes.scan_vulnerabilities",
                   new_callable=AsyncMock) as scan_mock:
            scan_mock.return_value = []
            session_cls.return_value = MagicMock()

            response = self._post_scan(
                "package-lock.json", payload, "application/json"
            )

        assert response.status_code == 200
        body = response.json()
        assert body["status"] == "completed"
        assert body["total_components"] == 2

    def test_scan_missing_file_returns_422(self):
        """Request without file returns 422."""
        response = _client_scr.post(
            "/v1/screening/scan",
            data={"tenant_id": TENANT_ID},
        )
        assert response.status_code == 422

    def test_scan_unparseable_file_returns_400(self):
        """File that cannot be parsed returns 400."""
        with patch("compliance.api.screening_routes.SessionLocal"):
            response = self._post_scan("readme.md", b"# Just a readme", "text/plain")
        assert response.status_code == 400

    def test_scan_with_vulnerabilities(self):
        """When vulnerabilities are found, issues list is populated."""
        issue = {
            "id": "issue-uuid",
            "severity": "HIGH",
            "title": "Remote Code Execution",
            "description": "RCE vulnerability in fastapi",
            "cve": "CVE-2024-0001",
            "cvss": 7.5,
            "affected_component": "fastapi",
            "affected_version": "0.1.0",
            "fixed_in": "0.2.0",
            "remediation": "Upgrade fastapi to 0.2.0",
            "status": "OPEN",
        }
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls, \
             patch("compliance.api.screening_routes.scan_vulnerabilities",
                   new_callable=AsyncMock) as scan_mock:
            scan_mock.return_value = [issue]
            session_cls.return_value = MagicMock()

            response = self._post_scan(
                "requirements.txt", b"fastapi==0.1.0", "text/plain"
            )

        assert response.status_code == 200
        body = response.json()
        assert body["total_issues"] == 1
        assert body["high_issues"] == 1
        assert len(body["issues"]) == 1
        assert body["issues"][0]["cve"] == "CVE-2024-0001"
class TestGetScreeningEndpoint:
    """API tests for GET /v1/screening/{screening_id}."""

    def test_get_screening_success(self):
        """Returns ScreeningResponse for a known ID."""
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            session = MagicMock()
            session_cls.return_value = session
            screening_result = MagicMock()
            screening_result.fetchone.return_value = _make_screening_row()
            issues_result = MagicMock()
            issues_result.fetchall.return_value = []
            # First execute() loads the screening row, second its issues.
            session.execute.side_effect = [screening_result, issues_result]

            response = _client_scr.get(f"/v1/screening/{SCREENING_UUID}")

        assert response.status_code == 200
        body = response.json()
        assert body["id"] == SCREENING_UUID
        assert body["status"] == "completed"
        assert body["sbom_format"] == "CycloneDX"

    def test_get_screening_not_found(self):
        """Returns 404 for unknown screening ID."""
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            session = MagicMock()
            session_cls.return_value = session
            empty_result = MagicMock()
            empty_result.fetchone.return_value = None
            session.execute.return_value = empty_result

            response = _client_scr.get("/v1/screening/nonexistent-uuid")

        assert response.status_code == 404

    def test_get_screening_includes_issues(self):
        """Issues from DB are included in response."""
        # Row: id, severity, title, description, cve, cvss,
        # affected_component, affected_version, fixed_in, remediation, status
        issue_row = [
            "issue-1", "HIGH", "XSS Vuln", "desc", "CVE-2024-001",
            7.5, "react", "18.0.0", "18.3.0", "Upgrade react", "OPEN",
        ]
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            session = MagicMock()
            session_cls.return_value = session
            screening_result = MagicMock()
            screening_result.fetchone.return_value = _make_screening_row()
            issues_result = MagicMock()
            issues_result.fetchall.return_value = [issue_row]
            session.execute.side_effect = [screening_result, issues_result]

            response = _client_scr.get(f"/v1/screening/{SCREENING_UUID}")

        assert response.status_code == 200
        body = response.json()
        assert len(body["issues"]) == 1
        assert body["issues"][0]["severity"] == "HIGH"
class TestListScreeningsEndpoint:
    """API tests for GET /v1/screening."""

    @staticmethod
    def _mock_session(session_cls, rows):
        # Wire SessionLocal so execute().fetchall() yields *rows*.
        session = MagicMock()
        session_cls.return_value = session
        listing = MagicMock()
        listing.fetchall.return_value = rows
        session.execute.return_value = listing
        return session

    def test_list_empty(self):
        """Returns empty list when no screenings exist."""
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            self._mock_session(session_cls, [])

            response = _client_scr.get(
                "/v1/screening", params={"tenant_id": TENANT_ID}
            )

        assert response.status_code == 200
        body = response.json()
        assert body["screenings"] == []
        assert body["total"] == 0

    def test_list_with_data(self):
        """Returns correct total count."""
        # Row: id, status, total_components, total_issues,
        # critical, high, medium, low, started_at, completed_at, created_at
        rows = [
            ["uuid-1", "completed", 10, 2, 0, 1, 1, 0,
             "2024-01-15T10:00:00", "2024-01-15T10:01:00", "2024-01-15"],
            ["uuid-2", "completed", 5, 0, 0, 0, 0, 0,
             "2024-01-16T09:00:00", "2024-01-16T09:00:30", "2024-01-16"],
        ]
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            self._mock_session(session_cls, rows)

            response = _client_scr.get(
                "/v1/screening", params={"tenant_id": TENANT_ID}
            )

        assert response.status_code == 200
        body = response.json()
        assert body["total"] == 2
        assert len(body["screenings"]) == 2

    def test_list_tenant_filter(self):
        """Tenant ID is used to filter screenings."""
        with patch("compliance.api.screening_routes.SessionLocal") as session_cls:
            session = self._mock_session(session_cls, [])

            _client_scr.get("/v1/screening", params={"tenant_id": "specific-tenant"})

            # The tenant id must be bound into the SQL execute() call.
            assert "specific-tenant" in str(session.execute.call_args)