Compare commits
15 Commits
feature/ke
...
feat/mcp-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
be019b5d4c | ||
|
|
1bf25525d8 | ||
|
|
e4495e405d | ||
|
|
abd6f65d55 | ||
|
|
d13cef94cb | ||
|
|
3a01a28591 | ||
|
|
d490359591 | ||
|
|
b95ce44fb9 | ||
|
|
175d303dc4 | ||
|
|
5a4af292fc | ||
|
|
04c8084943 | ||
|
|
d67a51db18 | ||
| 7e12d1433a | |||
| 65abc55915 | |||
| 0cb06d3d6d |
11
.env.example
11
.env.example
@@ -37,3 +37,14 @@ GIT_CLONE_BASE_PATH=/tmp/compliance-scanner/repos
|
||||
# Dashboard
|
||||
DASHBOARD_PORT=8080
|
||||
AGENT_API_URL=http://localhost:3001
|
||||
|
||||
# Keycloak (required for authentication)
|
||||
KEYCLOAK_URL=http://localhost:8080
|
||||
KEYCLOAK_REALM=compliance
|
||||
KEYCLOAK_CLIENT_ID=compliance-dashboard
|
||||
REDIRECT_URI=http://localhost:8080/auth/callback
|
||||
APP_URL=http://localhost:8080
|
||||
|
||||
# OpenTelemetry (optional - omit to disable)
|
||||
# OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
|
||||
# OTEL_SERVICE_NAME=compliance-agent
|
||||
|
||||
@@ -70,6 +70,8 @@ jobs:
|
||||
run: cargo clippy -p compliance-dashboard --features server --no-default-features -- -D warnings
|
||||
- name: Clippy (dashboard web)
|
||||
run: cargo clippy -p compliance-dashboard --features web --no-default-features -- -D warnings
|
||||
- name: Clippy (mcp)
|
||||
run: cargo clippy -p compliance-mcp -- -D warnings
|
||||
- name: Show sccache stats
|
||||
run: sccache --show-stats
|
||||
if: always()
|
||||
@@ -124,3 +126,119 @@ jobs:
|
||||
- name: Show sccache stats
|
||||
run: sccache --show-stats
|
||||
if: always()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Stage 3: Deploy (only on main, after tests pass)
|
||||
# Each service only deploys when its relevant files changed.
|
||||
# ---------------------------------------------------------------------------
|
||||
detect-changes:
|
||||
name: Detect Changes
|
||||
runs-on: docker
|
||||
if: github.ref == 'refs/heads/main'
|
||||
needs: [test]
|
||||
container:
|
||||
image: alpine:latest
|
||||
outputs:
|
||||
agent: ${{ steps.changes.outputs.agent }}
|
||||
dashboard: ${{ steps.changes.outputs.dashboard }}
|
||||
docs: ${{ steps.changes.outputs.docs }}
|
||||
mcp: ${{ steps.changes.outputs.mcp }}
|
||||
steps:
|
||||
- name: Install git
|
||||
run: apk add --no-cache git
|
||||
- name: Checkout
|
||||
run: |
|
||||
git init
|
||||
git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
|
||||
git fetch --depth=2 origin "${GITHUB_SHA}"
|
||||
git checkout FETCH_HEAD
|
||||
- name: Detect changed paths
|
||||
id: changes
|
||||
run: |
|
||||
CHANGED=$(git diff --name-only HEAD~1 HEAD 2>/dev/null || echo "")
|
||||
echo "Changed files:"
|
||||
echo "$CHANGED"
|
||||
|
||||
# Agent: core libs, agent code, agent Dockerfile
|
||||
if echo "$CHANGED" | grep -qE '^(compliance-core/|compliance-agent/|compliance-graph/|compliance-dast/|Dockerfile\.agent|Cargo\.(toml|lock))'; then
|
||||
echo "agent=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "agent=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
# Dashboard: core libs, dashboard code, dashboard Dockerfile, assets
|
||||
if echo "$CHANGED" | grep -qE '^(compliance-core/|compliance-dashboard/|Dockerfile\.dashboard|Dioxus\.toml|assets/|bin/|Cargo\.(toml|lock))'; then
|
||||
echo "dashboard=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "dashboard=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
# Docs: docs folder, docs Dockerfile
|
||||
if echo "$CHANGED" | grep -qE '^(docs/|Dockerfile\.docs)'; then
|
||||
echo "docs=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "docs=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
# MCP: core libs, mcp code, mcp Dockerfile
|
||||
if echo "$CHANGED" | grep -qE '^(compliance-core/|compliance-mcp/|Dockerfile\.mcp|Cargo\.(toml|lock))'; then
|
||||
echo "mcp=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "mcp=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
deploy-agent:
|
||||
name: Deploy Agent
|
||||
runs-on: docker
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.agent == 'true'
|
||||
container:
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_AGENT }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
deploy-dashboard:
|
||||
name: Deploy Dashboard
|
||||
runs-on: docker
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.dashboard == 'true'
|
||||
container:
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_DASHBOARD }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
deploy-docs:
|
||||
name: Deploy Docs
|
||||
runs-on: docker
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.docs == 'true'
|
||||
container:
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_DOCS }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
deploy-mcp:
|
||||
name: Deploy MCP
|
||||
runs-on: docker
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.mcp == 'true'
|
||||
container:
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_MCP }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
443
Cargo.lock
generated
443
Cargo.lock
generated
@@ -413,6 +413,17 @@ version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
|
||||
|
||||
[[package]]
|
||||
name = "chacha20"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures 0.3.0",
|
||||
"rand_core 0.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset"
|
||||
version = "0.1.5"
|
||||
@@ -555,6 +566,7 @@ dependencies = [
|
||||
"git2",
|
||||
"hex",
|
||||
"hmac",
|
||||
"jsonwebtoken",
|
||||
"mongodb",
|
||||
"octocrab",
|
||||
"regex",
|
||||
@@ -582,11 +594,18 @@ dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"mongodb",
|
||||
"opentelemetry",
|
||||
"opentelemetry-appender-tracing",
|
||||
"opentelemetry-otlp",
|
||||
"opentelemetry_sdk",
|
||||
"secrecy",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"thiserror 2.0.18",
|
||||
"tracing",
|
||||
"tracing-opentelemetry",
|
||||
"tracing-subscriber",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -595,6 +614,8 @@ name = "compliance-dashboard"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"base64",
|
||||
"bson",
|
||||
"chrono",
|
||||
"compliance-core",
|
||||
"dioxus",
|
||||
@@ -605,14 +626,20 @@ dependencies = [
|
||||
"dotenvy",
|
||||
"gloo-timers",
|
||||
"mongodb",
|
||||
"rand 0.9.2",
|
||||
"reqwest",
|
||||
"secrecy",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"thiserror 2.0.18",
|
||||
"time",
|
||||
"tokio",
|
||||
"tower-http",
|
||||
"tower-sessions",
|
||||
"tracing",
|
||||
"url",
|
||||
"uuid",
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
@@ -661,6 +688,27 @@ dependencies = [
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "compliance-mcp"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"bson",
|
||||
"chrono",
|
||||
"compliance-core",
|
||||
"dotenvy",
|
||||
"mongodb",
|
||||
"rmcp",
|
||||
"schemars 1.2.1",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "console_error_panic_hook"
|
||||
version = "0.1.7"
|
||||
@@ -792,7 +840,12 @@ version = "0.18.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"hmac",
|
||||
"percent-encoding",
|
||||
"rand 0.8.5",
|
||||
"sha2",
|
||||
"subtle",
|
||||
"time",
|
||||
"version_check",
|
||||
]
|
||||
@@ -850,6 +903,15 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.5.0"
|
||||
@@ -953,8 +1015,18 @@ version = "0.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_macro",
|
||||
"darling_core 0.21.3",
|
||||
"darling_macro 0.21.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d"
|
||||
dependencies = [
|
||||
"darling_core 0.23.0",
|
||||
"darling_macro 0.23.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -971,13 +1043,37 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_core"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0"
|
||||
dependencies = [
|
||||
"ident_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_core 0.21.3",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d"
|
||||
dependencies = [
|
||||
"darling_core 0.23.0",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
@@ -1808,7 +1904,7 @@ version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f43e744e4ea338060faee68ed933e46e722fb7f3617e722a5772d7e856d8b3ce"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"darling 0.21.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -2085,6 +2181,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"rand_core 0.10.0",
|
||||
"wasip2",
|
||||
"wasip3",
|
||||
]
|
||||
@@ -2104,6 +2201,12 @@ dependencies = [
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
|
||||
|
||||
[[package]]
|
||||
name = "gloo-net"
|
||||
version = "0.6.0"
|
||||
@@ -3519,6 +3622,96 @@ dependencies = [
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry"
|
||||
version = "0.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e87237e2775f74896f9ad219d26a2081751187eb7c9f5c58dde20a23b95d16c"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"js-sys",
|
||||
"pin-project-lite",
|
||||
"thiserror 2.0.18",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-appender-tracing"
|
||||
version = "0.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e716f864eb23007bdd9dc4aec381e188a1cee28eecf22066772b5fd822b9727d"
|
||||
dependencies = [
|
||||
"opentelemetry",
|
||||
"tracing",
|
||||
"tracing-core",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-http"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46d7ab32b827b5b495bd90fa95a6cb65ccc293555dcc3199ae2937d2d237c8ed"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bytes",
|
||||
"http",
|
||||
"opentelemetry",
|
||||
"reqwest",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-otlp"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d899720fe06916ccba71c01d04ecd77312734e2de3467fd30d9d580c8ce85656"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"http",
|
||||
"opentelemetry",
|
||||
"opentelemetry-http",
|
||||
"opentelemetry-proto",
|
||||
"opentelemetry_sdk",
|
||||
"prost",
|
||||
"reqwest",
|
||||
"thiserror 2.0.18",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-proto"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8c40da242381435e18570d5b9d50aca2a4f4f4d8e146231adb4e7768023309b3"
|
||||
dependencies = [
|
||||
"opentelemetry",
|
||||
"opentelemetry_sdk",
|
||||
"prost",
|
||||
"tonic",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry_sdk"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "afdefb21d1d47394abc1ba6c57363ab141be19e27cc70d0e422b7f303e4d290b"
|
||||
dependencies = [
|
||||
"futures-channel",
|
||||
"futures-executor",
|
||||
"futures-util",
|
||||
"glob",
|
||||
"opentelemetry",
|
||||
"percent-encoding",
|
||||
"rand 0.9.2",
|
||||
"serde_json",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ownedbytes"
|
||||
version = "0.7.0"
|
||||
@@ -3551,6 +3744,12 @@ dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pastey"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec"
|
||||
|
||||
[[package]]
|
||||
name = "pbkdf2"
|
||||
version = "0.12.2"
|
||||
@@ -3752,6 +3951,29 @@ dependencies = [
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prost"
|
||||
version = "0.13.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prost-derive"
|
||||
version = "0.13.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"itertools",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "psl-types"
|
||||
version = "2.0.11"
|
||||
@@ -3865,6 +4087,17 @@ dependencies = [
|
||||
"rand_core 0.9.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8"
|
||||
dependencies = [
|
||||
"chacha20",
|
||||
"getrandom 0.4.1",
|
||||
"rand_core 0.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.3.1"
|
||||
@@ -3903,6 +4136,12 @@ dependencies = [
|
||||
"getrandom 0.3.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba"
|
||||
|
||||
[[package]]
|
||||
name = "rand_distr"
|
||||
version = "0.4.3"
|
||||
@@ -4007,6 +4246,7 @@ dependencies = [
|
||||
"bytes",
|
||||
"cookie",
|
||||
"cookie_store",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"http",
|
||||
@@ -4022,6 +4262,7 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
"quinn",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -4061,6 +4302,50 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rmcp"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc4c9c94680f75470ee8083a0667988b5d7b5beb70b9f998a8e51de7c682ce60"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"futures",
|
||||
"http",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"pastey",
|
||||
"pin-project-lite",
|
||||
"rand 0.10.0",
|
||||
"rmcp-macros",
|
||||
"schemars 1.2.1",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sse-stream",
|
||||
"thiserror 2.0.18",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rmcp-macros"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90c23c8f26cae4da838fbc3eadfaecf2d549d97c04b558e7bd90526a9c28b42a"
|
||||
dependencies = [
|
||||
"darling 0.23.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde_json",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-stemmers"
|
||||
version = "1.2.0"
|
||||
@@ -4224,12 +4509,26 @@ version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"dyn-clone",
|
||||
"ref-cast",
|
||||
"schemars_derive",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde_derive_internals",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
@@ -4369,6 +4668,17 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive_internals"
|
||||
version = "0.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.149"
|
||||
@@ -4453,7 +4763,7 @@ version = "3.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a6d4e30573c8cb306ed6ab1dca8423eec9a463ea0e155f45399455e0368b27e0"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"darling 0.21.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -4475,7 +4785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"cpufeatures 0.2.17",
|
||||
"digest",
|
||||
]
|
||||
|
||||
@@ -4486,7 +4796,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"cpufeatures 0.2.17",
|
||||
"digest",
|
||||
]
|
||||
|
||||
@@ -4640,6 +4950,19 @@ version = "0.9.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
|
||||
|
||||
[[package]]
|
||||
name = "sse-stream"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eb4dc4d33c68ec1f27d386b5610a351922656e1fdf5c05bbaad930cd1519479a"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-util",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.1"
|
||||
@@ -5211,6 +5534,27 @@ dependencies = [
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tonic"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64",
|
||||
"bytes",
|
||||
"http",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"percent-encoding",
|
||||
"pin-project",
|
||||
"prost",
|
||||
"tokio-stream",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
version = "0.5.3"
|
||||
@@ -5228,6 +5572,22 @@ dependencies = [
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-cookies"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "151b5a3e3c45df17466454bb74e9ecedecc955269bdedbf4d150dfa393b55a36"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"cookie",
|
||||
"futures-util",
|
||||
"http",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-http"
|
||||
version = "0.6.8"
|
||||
@@ -5268,6 +5628,57 @@ version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
|
||||
|
||||
[[package]]
|
||||
name = "tower-sessions"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "518dca34b74a17cadfcee06e616a09d2bd0c3984eff1769e1e76d58df978fc78"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"http",
|
||||
"time",
|
||||
"tokio",
|
||||
"tower-cookies",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tower-sessions-core",
|
||||
"tower-sessions-memory-store",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-sessions-core"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "568531ec3dfcf3ffe493de1958ae5662a0284ac5d767476ecdb6a34ff8c6b06c"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"axum-core",
|
||||
"base64",
|
||||
"futures",
|
||||
"http",
|
||||
"parking_lot",
|
||||
"rand 0.9.2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.18",
|
||||
"time",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-sessions-memory-store"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "713fabf882b6560a831e2bbed6204048b35bdd60e50bbb722902c74f8df33460"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"time",
|
||||
"tokio",
|
||||
"tower-sessions-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.44"
|
||||
@@ -5322,6 +5733,24 @@ dependencies = [
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-opentelemetry"
|
||||
version = "0.30.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fd8e764bd6f5813fd8bebc3117875190c5b0415be8f7f8059bffb6ecd979c444"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"once_cell",
|
||||
"opentelemetry",
|
||||
"opentelemetry_sdk",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
"tracing-subscriber",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.22"
|
||||
|
||||
@@ -5,6 +5,7 @@ members = [
|
||||
"compliance-dashboard",
|
||||
"compliance-graph",
|
||||
"compliance-dast",
|
||||
"compliance-mcp",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
|
||||
@@ -2,8 +2,11 @@ FROM rust:1.89-bookworm AS builder
|
||||
|
||||
RUN cargo install dioxus-cli --version 0.7.3
|
||||
|
||||
ARG DOCS_URL=/docs
|
||||
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
ENV DOCS_URL=${DOCS_URL}
|
||||
RUN dx build --release --package compliance-dashboard
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
@@ -13,6 +16,7 @@ WORKDIR /app
|
||||
COPY --from=builder /app/target/dx/compliance-dashboard/release/web/compliance-dashboard /app/compliance-dashboard
|
||||
COPY --from=builder /app/target/dx/compliance-dashboard/release/web/public /app/public
|
||||
|
||||
ENV IP=0.0.0.0
|
||||
EXPOSE 8080
|
||||
|
||||
ENTRYPOINT ["./compliance-dashboard"]
|
||||
|
||||
14
Dockerfile.docs
Normal file
14
Dockerfile.docs
Normal file
@@ -0,0 +1,14 @@
|
||||
FROM node:22-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
COPY docs/package.json docs/package-lock.json ./
|
||||
RUN npm ci
|
||||
|
||||
COPY docs/ .
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:alpine
|
||||
RUN rm /etc/nginx/conf.d/default.conf
|
||||
COPY docs/nginx.conf /etc/nginx/conf.d/default.conf
|
||||
COPY --from=builder /app/.vitepress/dist /usr/share/nginx/html
|
||||
EXPOSE 80
|
||||
16
Dockerfile.mcp
Normal file
16
Dockerfile.mcp
Normal file
@@ -0,0 +1,16 @@
|
||||
FROM rust:1.89-bookworm AS builder
|
||||
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN cargo build --release -p compliance-mcp
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
RUN apt-get update && apt-get install -y ca-certificates libssl3 && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /app/target/release/compliance-mcp /usr/local/bin/compliance-mcp
|
||||
|
||||
EXPOSE 8090
|
||||
|
||||
ENV MCP_PORT=8090
|
||||
|
||||
ENTRYPOINT ["compliance-mcp"]
|
||||
291
assets/main.css
291
assets/main.css
@@ -300,6 +300,84 @@ tr:hover {
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
/* Sidebar User Section */
|
||||
.sidebar-user {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 12px 14px;
|
||||
margin: 8px;
|
||||
border-top: 1px solid var(--border);
|
||||
padding-top: 16px;
|
||||
}
|
||||
|
||||
.sidebar-user-collapsed {
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
padding: 12px 4px;
|
||||
margin: 8px 4px;
|
||||
}
|
||||
|
||||
.user-avatar {
|
||||
width: 34px;
|
||||
height: 34px;
|
||||
border-radius: 10px;
|
||||
background: linear-gradient(135deg, rgba(56, 189, 248, 0.2), rgba(56, 189, 248, 0.08));
|
||||
border: 1px solid rgba(56, 189, 248, 0.15);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.avatar-initials {
|
||||
font-size: 13px;
|
||||
font-weight: 700;
|
||||
color: var(--accent);
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.avatar-img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
border-radius: 10px;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.user-name {
|
||||
flex: 1;
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
color: var(--text-primary);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.logout-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
border-radius: 8px;
|
||||
color: var(--text-secondary);
|
||||
text-decoration: none;
|
||||
flex-shrink: 0;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.logout-btn:hover {
|
||||
background: rgba(239, 68, 68, 0.12);
|
||||
color: #fca5a5;
|
||||
}
|
||||
|
||||
.logout-btn-collapsed {
|
||||
width: 34px;
|
||||
height: 34px;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.sidebar {
|
||||
transform: translateX(-100%);
|
||||
@@ -313,3 +391,216 @@ tr:hover {
|
||||
padding: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* ── Utility classes ────────────────────────────────────── */
|
||||
|
||||
.mb-3 { margin-bottom: 12px; }
|
||||
.mb-4 { margin-bottom: 16px; }
|
||||
.text-secondary { color: var(--text-secondary); }
|
||||
|
||||
.btn-sm {
|
||||
padding: 4px 10px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.btn-danger {
|
||||
background: var(--danger);
|
||||
color: #fff;
|
||||
}
|
||||
.btn-danger:hover {
|
||||
background: #dc2626;
|
||||
}
|
||||
|
||||
.btn-secondary {
|
||||
background: var(--bg-secondary);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border);
|
||||
}
|
||||
.btn-secondary:hover {
|
||||
background: var(--bg-primary);
|
||||
}
|
||||
|
||||
/* ── Modal ──────────────────────────────────────────────── */
|
||||
|
||||
.modal-overlay {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
backdrop-filter: blur(4px);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
.modal-dialog {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 24px;
|
||||
max-width: 440px;
|
||||
width: 90%;
|
||||
}
|
||||
|
||||
.modal-dialog h3 {
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.modal-dialog p {
|
||||
margin-bottom: 8px;
|
||||
font-size: 14px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.modal-warning {
|
||||
color: var(--warning) !important;
|
||||
font-size: 13px !important;
|
||||
}
|
||||
|
||||
.modal-actions {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
justify-content: flex-end;
|
||||
margin-top: 16px;
|
||||
}
|
||||
|
||||
/* ── MCP Servers ────────────────────────────────────────── */
|
||||
|
||||
.mcp-server-card {
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.mcp-server-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: flex-start;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.mcp-server-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.mcp-server-title h3 {
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.mcp-server-actions {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
.mcp-status {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 2px 10px;
|
||||
border-radius: 20px;
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
|
||||
.mcp-status-running {
|
||||
background: rgba(34, 197, 94, 0.15);
|
||||
color: var(--success);
|
||||
}
|
||||
|
||||
.mcp-status-stopped {
|
||||
background: rgba(148, 163, 184, 0.15);
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.mcp-status-error {
|
||||
background: rgba(239, 68, 68, 0.15);
|
||||
color: var(--danger);
|
||||
}
|
||||
|
||||
.mcp-config-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
||||
gap: 12px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.mcp-config-item {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.mcp-config-label {
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.mcp-config-value {
|
||||
font-size: 13px;
|
||||
color: var(--text-primary);
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.mcp-form-grid {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
gap: 0 16px;
|
||||
}
|
||||
|
||||
.mcp-tools-section {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.mcp-tools-list {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 6px;
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
.mcp-tool-badge {
|
||||
display: inline-block;
|
||||
padding: 3px 10px;
|
||||
background: rgba(56, 189, 248, 0.1);
|
||||
border: 1px solid rgba(56, 189, 248, 0.2);
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.mcp-token-section {
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.mcp-token-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
.mcp-token-value {
|
||||
flex: 1;
|
||||
padding: 6px 10px;
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 6px;
|
||||
font-size: 12px;
|
||||
font-family: 'JetBrains Mono', monospace;
|
||||
color: var(--text-secondary);
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.mcp-meta {
|
||||
padding-top: 12px;
|
||||
border-top: 1px solid var(--border);
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
|
||||
#[allow(clippy::expect_used)]
|
||||
fn main() {
|
||||
dioxus_logger::init(tracing::Level::DEBUG).expect("Failed to init logger");
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
{
|
||||
dioxus_logger::init(tracing::Level::DEBUG).expect("Failed to init logger");
|
||||
dioxus::web::launch::launch_cfg(
|
||||
compliance_dashboard::App,
|
||||
dioxus::web::Config::new().hydrate(true),
|
||||
@@ -14,6 +13,9 @@ fn main() {
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
{
|
||||
dotenvy::dotenv().ok();
|
||||
let _telemetry_guard = compliance_core::telemetry::init_telemetry("compliance-dashboard");
|
||||
|
||||
compliance_dashboard::infrastructure::server_start(compliance_dashboard::App)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Unable to start server: {e}");
|
||||
|
||||
@@ -7,7 +7,7 @@ edition = "2021"
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
compliance-core = { workspace = true, features = ["mongodb"] }
|
||||
compliance-core = { workspace = true, features = ["mongodb", "telemetry"] }
|
||||
compliance-graph = { path = "../compliance-graph" }
|
||||
compliance-dast = { path = "../compliance-dast" }
|
||||
serde = { workspace = true }
|
||||
@@ -35,3 +35,4 @@ walkdir = "2"
|
||||
base64 = "0.22"
|
||||
urlencoding = "2"
|
||||
futures-util = "0.3"
|
||||
jsonwebtoken = "9"
|
||||
|
||||
113
compliance-agent/src/api/auth_middleware.rs
Normal file
113
compliance-agent/src/api/auth_middleware.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
extract::Request,
|
||||
middleware::Next,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use jsonwebtoken::{decode, decode_header, jwk::JwkSet, DecodingKey, Validation};
|
||||
use reqwest::StatusCode;
|
||||
use serde::Deserialize;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// Cached JWKS from Keycloak for token validation.
|
||||
#[derive(Clone)]
|
||||
pub struct JwksState {
|
||||
pub jwks: Arc<RwLock<Option<JwkSet>>>,
|
||||
pub jwks_url: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Claims {
|
||||
#[allow(dead_code)]
|
||||
sub: String,
|
||||
}
|
||||
|
||||
const PUBLIC_ENDPOINTS: &[&str] = &["/api/v1/health"];
|
||||
|
||||
/// Middleware that validates Bearer JWT tokens against Keycloak's JWKS.
|
||||
///
|
||||
/// Skips validation for health check endpoints.
|
||||
/// If `JwksState` is not present as an extension (keycloak not configured),
|
||||
/// all requests pass through.
|
||||
pub async fn require_jwt_auth(request: Request, next: Next) -> Response {
|
||||
let path = request.uri().path();
|
||||
|
||||
if PUBLIC_ENDPOINTS.contains(&path) {
|
||||
return next.run(request).await;
|
||||
}
|
||||
|
||||
let jwks_state = match request.extensions().get::<JwksState>() {
|
||||
Some(s) => s.clone(),
|
||||
None => return next.run(request).await,
|
||||
};
|
||||
|
||||
let auth_header = match request.headers().get("authorization") {
|
||||
Some(h) => h,
|
||||
None => return (StatusCode::UNAUTHORIZED, "Missing authorization header").into_response(),
|
||||
};
|
||||
|
||||
let token = match auth_header.to_str() {
|
||||
Ok(s) if s.starts_with("Bearer ") => &s[7..],
|
||||
_ => return (StatusCode::UNAUTHORIZED, "Invalid authorization header").into_response(),
|
||||
};
|
||||
|
||||
match validate_token(token, &jwks_state).await {
|
||||
Ok(()) => next.run(request).await,
|
||||
Err(e) => {
|
||||
tracing::warn!("JWT validation failed: {e}");
|
||||
(StatusCode::UNAUTHORIZED, "Invalid token").into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn validate_token(token: &str, state: &JwksState) -> Result<(), String> {
|
||||
let header = decode_header(token).map_err(|e| format!("failed to decode JWT header: {e}"))?;
|
||||
|
||||
let kid = header
|
||||
.kid
|
||||
.ok_or_else(|| "JWT missing kid header".to_string())?;
|
||||
|
||||
let jwks = fetch_or_get_jwks(state).await?;
|
||||
|
||||
let jwk = jwks
|
||||
.keys
|
||||
.iter()
|
||||
.find(|k| k.common.key_id.as_deref() == Some(&kid))
|
||||
.ok_or_else(|| "no matching key found in JWKS".to_string())?;
|
||||
|
||||
let decoding_key =
|
||||
DecodingKey::from_jwk(jwk).map_err(|e| format!("failed to create decoding key: {e}"))?;
|
||||
|
||||
let mut validation = Validation::new(header.alg);
|
||||
validation.validate_exp = true;
|
||||
validation.validate_aud = false;
|
||||
|
||||
decode::<Claims>(token, &decoding_key, &validation)
|
||||
.map_err(|e| format!("token validation failed: {e}"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_or_get_jwks(state: &JwksState) -> Result<JwkSet, String> {
|
||||
{
|
||||
let cached = state.jwks.read().await;
|
||||
if let Some(ref jwks) = *cached {
|
||||
return Ok(jwks.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let resp = reqwest::get(&state.jwks_url)
|
||||
.await
|
||||
.map_err(|e| format!("failed to fetch JWKS: {e}"))?;
|
||||
|
||||
let jwks: JwkSet = resp
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| format!("failed to parse JWKS: {e}"))?;
|
||||
|
||||
let mut cached = state.jwks.write().await;
|
||||
*cached = Some(jwks.clone());
|
||||
|
||||
Ok(jwks)
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod auth_middleware;
|
||||
pub mod handlers;
|
||||
pub mod routes;
|
||||
pub mod server;
|
||||
|
||||
@@ -1,19 +1,37 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::Extension;
|
||||
use axum::{middleware, Extension};
|
||||
use tokio::sync::RwLock;
|
||||
use tower_http::cors::CorsLayer;
|
||||
use tower_http::trace::TraceLayer;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
use crate::api::auth_middleware::{require_jwt_auth, JwksState};
|
||||
use crate::api::routes;
|
||||
use crate::error::AgentError;
|
||||
|
||||
pub async fn start_api_server(agent: ComplianceAgent, port: u16) -> Result<(), AgentError> {
|
||||
let app = routes::build_router()
|
||||
.layer(Extension(Arc::new(agent)))
|
||||
let mut app = routes::build_router()
|
||||
.layer(Extension(Arc::new(agent.clone())))
|
||||
.layer(CorsLayer::permissive())
|
||||
.layer(TraceLayer::new_for_http());
|
||||
|
||||
if let (Some(kc_url), Some(kc_realm)) =
|
||||
(&agent.config.keycloak_url, &agent.config.keycloak_realm)
|
||||
{
|
||||
let jwks_url = format!("{kc_url}/realms/{kc_realm}/protocol/openid-connect/certs");
|
||||
let jwks_state = JwksState {
|
||||
jwks: Arc::new(RwLock::new(None)),
|
||||
jwks_url,
|
||||
};
|
||||
tracing::info!("Keycloak JWT auth enabled for realm '{kc_realm}'");
|
||||
app = app
|
||||
.layer(Extension(jwks_state))
|
||||
.layer(middleware::from_fn(require_jwt_auth));
|
||||
} else {
|
||||
tracing::warn!("Keycloak not configured - API endpoints are unprotected");
|
||||
}
|
||||
|
||||
let addr = format!("0.0.0.0:{port}");
|
||||
let listener = tokio::net::TcpListener::bind(&addr)
|
||||
.await
|
||||
|
||||
@@ -45,5 +45,7 @@ pub fn load_config() -> Result<AgentConfig, AgentError> {
|
||||
.unwrap_or_else(|| "0 0 0 * * *".to_string()),
|
||||
git_clone_base_path: env_var_opt("GIT_CLONE_BASE_PATH")
|
||||
.unwrap_or_else(|| "/tmp/compliance-scanner/repos".to_string()),
|
||||
keycloak_url: env_var_opt("KEYCLOAK_URL"),
|
||||
keycloak_realm: env_var_opt("KEYCLOAK_REALM"),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
mod agent;
|
||||
mod api;
|
||||
mod config;
|
||||
@@ -15,14 +13,10 @@ mod webhooks;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
tracing_subscriber::fmt()
|
||||
.with_env_filter(
|
||||
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
|
||||
)
|
||||
.init();
|
||||
|
||||
dotenvy::dotenv().ok();
|
||||
|
||||
let _telemetry_guard = compliance_core::telemetry::init_telemetry("compliance-agent");
|
||||
|
||||
tracing::info!("Loading configuration...");
|
||||
let config = config::load_config()?;
|
||||
|
||||
|
||||
@@ -9,6 +9,15 @@ workspace = true
|
||||
[features]
|
||||
default = ["mongodb"]
|
||||
mongodb = ["dep:mongodb"]
|
||||
telemetry = [
|
||||
"dep:opentelemetry",
|
||||
"dep:opentelemetry_sdk",
|
||||
"dep:opentelemetry-otlp",
|
||||
"dep:opentelemetry-appender-tracing",
|
||||
"dep:tracing-opentelemetry",
|
||||
"dep:tracing-subscriber",
|
||||
"dep:tracing",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
serde = { workspace = true }
|
||||
@@ -21,3 +30,10 @@ uuid = { workspace = true }
|
||||
secrecy = { workspace = true }
|
||||
bson = { version = "2", features = ["chrono-0_4"] }
|
||||
mongodb = { workspace = true, optional = true }
|
||||
opentelemetry = { version = "0.29", optional = true }
|
||||
opentelemetry_sdk = { version = "0.29", features = ["rt-tokio"], optional = true }
|
||||
opentelemetry-otlp = { version = "0.29", features = ["http", "reqwest-rustls"], optional = true }
|
||||
opentelemetry-appender-tracing = { version = "0.29", optional = true }
|
||||
tracing-opentelemetry = { version = "0.30", optional = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
tracing = { workspace = true, optional = true }
|
||||
|
||||
@@ -24,6 +24,8 @@ pub struct AgentConfig {
|
||||
pub scan_schedule: String,
|
||||
pub cve_monitor_schedule: String,
|
||||
pub git_clone_base_path: String,
|
||||
pub keycloak_url: Option<String>,
|
||||
pub keycloak_realm: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
pub mod config;
|
||||
pub mod error;
|
||||
pub mod models;
|
||||
#[cfg(feature = "telemetry")]
|
||||
pub mod telemetry;
|
||||
pub mod traits;
|
||||
|
||||
pub use config::{AgentConfig, DashboardConfig};
|
||||
|
||||
14
compliance-core/src/models/auth.rs
Normal file
14
compliance-core/src/models/auth.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Authentication state returned by the `check_auth` server function.
|
||||
///
|
||||
/// When no valid session exists, `authenticated` is `false` and all
|
||||
/// other fields are empty strings.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)]
|
||||
pub struct AuthInfo {
|
||||
pub authenticated: bool,
|
||||
pub sub: String,
|
||||
pub email: String,
|
||||
pub name: String,
|
||||
pub avatar_url: String,
|
||||
}
|
||||
67
compliance-core/src/models/mcp.rs
Normal file
67
compliance-core/src/models/mcp.rs
Normal file
@@ -0,0 +1,67 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Transport mode for MCP server
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum McpTransport {
|
||||
Stdio,
|
||||
Http,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for McpTransport {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Stdio => write!(f, "stdio"),
|
||||
Self::Http => write!(f, "http"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Status of a running MCP server
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum McpServerStatus {
|
||||
Running,
|
||||
Stopped,
|
||||
Error,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for McpServerStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Running => write!(f, "running"),
|
||||
Self::Stopped => write!(f, "stopped"),
|
||||
Self::Error => write!(f, "error"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Configuration for a registered MCP server instance
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct McpServerConfig {
|
||||
#[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
|
||||
pub id: Option<bson::oid::ObjectId>,
|
||||
/// Display name for this MCP server
|
||||
pub name: String,
|
||||
/// Endpoint URL (e.g. https://mcp.example.com/mcp)
|
||||
pub endpoint_url: String,
|
||||
/// Transport type
|
||||
pub transport: McpTransport,
|
||||
/// Port number (for HTTP transport)
|
||||
pub port: Option<u16>,
|
||||
/// Current status
|
||||
pub status: McpServerStatus,
|
||||
/// Bearer access token for authentication
|
||||
pub access_token: String,
|
||||
/// Which tools are enabled on this server
|
||||
pub tools_enabled: Vec<String>,
|
||||
/// Optional description / notes
|
||||
pub description: Option<String>,
|
||||
/// MongoDB URI this server connects to
|
||||
pub mongodb_uri: Option<String>,
|
||||
/// Database name
|
||||
pub mongodb_database: Option<String>,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod auth;
|
||||
pub mod chat;
|
||||
pub mod cve;
|
||||
pub mod dast;
|
||||
@@ -5,10 +6,12 @@ pub mod embedding;
|
||||
pub mod finding;
|
||||
pub mod graph;
|
||||
pub mod issue;
|
||||
pub mod mcp;
|
||||
pub mod repository;
|
||||
pub mod sbom;
|
||||
pub mod scan;
|
||||
|
||||
pub use auth::AuthInfo;
|
||||
pub use chat::{ChatMessage, ChatRequest, ChatResponse, SourceReference};
|
||||
pub use cve::{CveAlert, CveSource};
|
||||
pub use dast::{
|
||||
@@ -21,6 +24,7 @@ pub use graph::{
|
||||
CodeEdge, CodeEdgeKind, CodeNode, CodeNodeKind, GraphBuildRun, GraphBuildStatus, ImpactAnalysis,
|
||||
};
|
||||
pub use issue::{IssueStatus, TrackerIssue, TrackerType};
|
||||
pub use mcp::{McpServerConfig, McpServerStatus, McpTransport};
|
||||
pub use repository::{ScanTrigger, TrackedRepository};
|
||||
pub use sbom::{SbomEntry, VulnRef};
|
||||
pub use scan::{ScanPhase, ScanRun, ScanRunStatus, ScanType};
|
||||
|
||||
152
compliance-core/src/telemetry.rs
Normal file
152
compliance-core/src/telemetry.rs
Normal file
@@ -0,0 +1,152 @@
|
||||
//! OpenTelemetry initialization for traces and logs.
|
||||
//!
|
||||
//! Exports traces and logs via OTLP/HTTP when `OTEL_EXPORTER_OTLP_ENDPOINT`
|
||||
//! is set. Always includes a `tracing_subscriber::fmt` layer for console output.
|
||||
//!
|
||||
//! Compatible with SigNoz, Grafana Tempo/Loki, Jaeger, and any OTLP-compatible
|
||||
//! collector.
|
||||
//!
|
||||
//! # Environment Variables
|
||||
//!
|
||||
//! | Variable | Description | Default |
|
||||
//! |---|---|---|
|
||||
//! | `OTEL_EXPORTER_OTLP_ENDPOINT` | OTLP collector endpoint (e.g. `https://otel.example.com`) | *(disabled)* |
|
||||
//! | `OTEL_SERVICE_NAME` | Service name for resource | `service_name` param |
|
||||
//! | `RUST_LOG` / standard `EnvFilter` | Log level filter | `info` |
|
||||
|
||||
use opentelemetry::global;
|
||||
use opentelemetry::trace::TracerProvider as _;
|
||||
use opentelemetry::KeyValue;
|
||||
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
|
||||
use opentelemetry_otlp::{LogExporter, SpanExporter, WithExportConfig};
|
||||
use opentelemetry_sdk::{logs::SdkLoggerProvider, trace::SdkTracerProvider, Resource};
|
||||
use tracing_opentelemetry::OpenTelemetryLayer;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer as _};
|
||||
|
||||
/// Guard that shuts down OTel providers on drop.
|
||||
///
|
||||
/// Must be held for the lifetime of the application. When dropped,
|
||||
/// flushes and shuts down the tracer and logger providers.
|
||||
pub struct TelemetryGuard {
|
||||
tracer_provider: Option<SdkTracerProvider>,
|
||||
logger_provider: Option<SdkLoggerProvider>,
|
||||
}
|
||||
|
||||
impl Drop for TelemetryGuard {
|
||||
fn drop(&mut self) {
|
||||
if let Some(tp) = self.tracer_provider.take() {
|
||||
if let Err(e) = tp.shutdown() {
|
||||
eprintln!("Failed to shutdown tracer provider: {e}");
|
||||
}
|
||||
}
|
||||
if let Some(lp) = self.logger_provider.take() {
|
||||
if let Err(e) = lp.shutdown() {
|
||||
eprintln!("Failed to shutdown logger provider: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_resource(service_name: &str) -> Resource {
|
||||
let name = std::env::var("OTEL_SERVICE_NAME").unwrap_or_else(|_| service_name.to_string());
|
||||
Resource::builder()
|
||||
.with_service_name(name)
|
||||
.with_attributes([KeyValue::new("service.version", env!("CARGO_PKG_VERSION"))])
|
||||
.build()
|
||||
}
|
||||
|
||||
/// Initialize telemetry (tracing + logging).
|
||||
///
|
||||
/// If `OTEL_EXPORTER_OTLP_ENDPOINT` is set, traces and logs are exported
|
||||
/// via OTLP/HTTP. Console fmt output is always enabled.
|
||||
///
|
||||
/// Returns a [`TelemetryGuard`] that must be held alive for the application
|
||||
/// lifetime. Dropping it triggers a graceful shutdown of OTel providers.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the tracing subscriber cannot be initialized (e.g. called twice).
|
||||
pub fn init_telemetry(service_name: &str) -> TelemetryGuard {
|
||||
let otel_endpoint = std::env::var("OTEL_EXPORTER_OTLP_ENDPOINT").ok();
|
||||
|
||||
let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
|
||||
let fmt_layer = tracing_subscriber::fmt::layer();
|
||||
|
||||
match otel_endpoint {
|
||||
Some(ref endpoint) => {
|
||||
let resource = build_resource(service_name);
|
||||
|
||||
let traces_endpoint = format!("{endpoint}/v1/traces");
|
||||
let logs_endpoint = format!("{endpoint}/v1/logs");
|
||||
|
||||
// Traces
|
||||
#[allow(clippy::expect_used)]
|
||||
let span_exporter = SpanExporter::builder()
|
||||
.with_http()
|
||||
.with_endpoint(&traces_endpoint)
|
||||
.build()
|
||||
.expect("failed to create OTLP span exporter");
|
||||
|
||||
let tracer_provider = SdkTracerProvider::builder()
|
||||
.with_batch_exporter(span_exporter)
|
||||
.with_resource(resource.clone())
|
||||
.build();
|
||||
|
||||
global::set_tracer_provider(tracer_provider.clone());
|
||||
let tracer = tracer_provider.tracer(service_name.to_string());
|
||||
let otel_trace_layer = OpenTelemetryLayer::new(tracer);
|
||||
|
||||
// Logs
|
||||
#[allow(clippy::expect_used)]
|
||||
let log_exporter = LogExporter::builder()
|
||||
.with_http()
|
||||
.with_endpoint(&logs_endpoint)
|
||||
.build()
|
||||
.expect("failed to create OTLP log exporter");
|
||||
|
||||
let logger_provider = SdkLoggerProvider::builder()
|
||||
.with_batch_exporter(log_exporter)
|
||||
.with_resource(resource)
|
||||
.build();
|
||||
|
||||
let otel_log_layer = OpenTelemetryTracingBridge::new(&logger_provider);
|
||||
|
||||
// Filter to prevent telemetry-induced-telemetry loops
|
||||
let otel_filter = EnvFilter::new("info")
|
||||
.add_directive("hyper=off".parse().unwrap_or_default())
|
||||
.add_directive("h2=off".parse().unwrap_or_default())
|
||||
.add_directive("reqwest=off".parse().unwrap_or_default());
|
||||
|
||||
tracing_subscriber::registry()
|
||||
.with(env_filter)
|
||||
.with(fmt_layer)
|
||||
.with(otel_trace_layer)
|
||||
.with(otel_log_layer.with_filter(otel_filter))
|
||||
.init();
|
||||
|
||||
tracing::info!(
|
||||
endpoint = endpoint.as_str(),
|
||||
service = service_name,
|
||||
"OpenTelemetry OTLP/HTTP export enabled"
|
||||
);
|
||||
|
||||
TelemetryGuard {
|
||||
tracer_provider: Some(tracer_provider),
|
||||
logger_provider: Some(logger_provider),
|
||||
}
|
||||
}
|
||||
None => {
|
||||
tracing_subscriber::registry()
|
||||
.with(env_filter)
|
||||
.with(fmt_layer)
|
||||
.init();
|
||||
|
||||
tracing::info!("OpenTelemetry disabled (set OTEL_EXPORTER_OTLP_ENDPOINT to enable)");
|
||||
|
||||
TelemetryGuard {
|
||||
tracer_provider: None,
|
||||
logger_provider: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -18,6 +18,7 @@ server = [
|
||||
"dioxus/router",
|
||||
"dioxus/fullstack",
|
||||
"compliance-core/mongodb",
|
||||
"compliance-core/telemetry",
|
||||
"dep:axum",
|
||||
"dep:mongodb",
|
||||
"dep:reqwest",
|
||||
@@ -27,6 +28,14 @@ server = [
|
||||
"dep:dioxus-cli-config",
|
||||
"dep:dioxus-fullstack",
|
||||
"dep:tokio",
|
||||
"dep:tower-sessions",
|
||||
"dep:time",
|
||||
"dep:rand",
|
||||
"dep:url",
|
||||
"dep:sha2",
|
||||
"dep:base64",
|
||||
"dep:uuid",
|
||||
"dep:bson",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
@@ -54,3 +63,11 @@ dotenvy = { version = "0.15", optional = true }
|
||||
tokio = { workspace = true, optional = true }
|
||||
dioxus-cli-config = { version = "=0.7.3", optional = true }
|
||||
dioxus-fullstack = { version = "=0.7.3", optional = true }
|
||||
tower-sessions = { version = "0.15", default-features = false, features = ["axum-core", "memory-store", "signed"], optional = true }
|
||||
time = { version = "0.3", default-features = false, optional = true }
|
||||
rand = { version = "0.9", optional = true }
|
||||
url = { version = "2", optional = true }
|
||||
sha2 = { workspace = true, optional = true }
|
||||
base64 = { version = "0.22", optional = true }
|
||||
uuid = { workspace = true, optional = true }
|
||||
bson = { version = "2", features = ["chrono-0_4"], optional = true }
|
||||
|
||||
@@ -38,6 +38,8 @@ pub enum Route {
|
||||
DastFindingsPage {},
|
||||
#[route("/dast/findings/:id")]
|
||||
DastFindingDetailPage { id: String },
|
||||
#[route("/mcp-servers")]
|
||||
McpServersPage {},
|
||||
#[route("/settings")]
|
||||
SettingsPage {},
|
||||
}
|
||||
|
||||
@@ -3,17 +3,41 @@ use dioxus::prelude::*;
|
||||
use crate::app::Route;
|
||||
use crate::components::sidebar::Sidebar;
|
||||
use crate::components::toast::{ToastContainer, Toasts};
|
||||
use crate::infrastructure::auth_check::check_auth;
|
||||
|
||||
#[component]
|
||||
pub fn AppShell() -> Element {
|
||||
use_context_provider(Toasts::new);
|
||||
rsx! {
|
||||
div { class: "app-shell",
|
||||
Sidebar {}
|
||||
main { class: "main-content",
|
||||
Outlet::<Route> {}
|
||||
|
||||
let auth = use_server_future(check_auth)?;
|
||||
|
||||
match auth() {
|
||||
Some(Ok(info)) if info.authenticated => {
|
||||
use_context_provider(|| Signal::new(info.clone()));
|
||||
rsx! {
|
||||
div { class: "app-shell",
|
||||
Sidebar {}
|
||||
main { class: "main-content",
|
||||
Outlet::<Route> {}
|
||||
}
|
||||
ToastContainer {}
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(Ok(_)) | Some(Err(_)) => {
|
||||
// Not authenticated — redirect to Keycloak login
|
||||
rsx! {
|
||||
document::Script {
|
||||
dangerous_inner_html: "window.location.href = '/auth';"
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
rsx! {
|
||||
div { class: "flex items-center justify-center h-screen bg-gray-950",
|
||||
p { class: "text-gray-400", "Loading..." }
|
||||
}
|
||||
}
|
||||
ToastContainer {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use compliance_core::models::auth::AuthInfo;
|
||||
use dioxus::prelude::*;
|
||||
use dioxus_free_icons::icons::bs_icons::*;
|
||||
use dioxus_free_icons::Icon;
|
||||
@@ -56,6 +57,11 @@ pub fn Sidebar() -> Element {
|
||||
route: Route::DastOverviewPage {},
|
||||
icon: rsx! { Icon { icon: BsBug, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "MCP Servers",
|
||||
route: Route::McpServersPage {},
|
||||
icon: rsx! { Icon { icon: BsPlug, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "Settings",
|
||||
route: Route::SettingsPage {},
|
||||
@@ -63,6 +69,8 @@ pub fn Sidebar() -> Element {
|
||||
},
|
||||
];
|
||||
|
||||
let docs_url = option_env!("DOCS_URL").unwrap_or("/docs");
|
||||
|
||||
let sidebar_class = if collapsed() {
|
||||
"sidebar collapsed"
|
||||
} else {
|
||||
@@ -105,6 +113,15 @@ pub fn Sidebar() -> Element {
|
||||
}
|
||||
}
|
||||
}
|
||||
a {
|
||||
href: "{docs_url}",
|
||||
target: "_blank",
|
||||
class: "nav-item",
|
||||
Icon { icon: BsBook, width: 18, height: 18 }
|
||||
if !collapsed() {
|
||||
span { "Docs" }
|
||||
}
|
||||
}
|
||||
button {
|
||||
class: "sidebar-toggle",
|
||||
onclick: move |_| collapsed.set(!collapsed()),
|
||||
@@ -114,8 +131,31 @@ pub fn Sidebar() -> Element {
|
||||
Icon { icon: BsChevronLeft, width: 14, height: 14 }
|
||||
}
|
||||
}
|
||||
if !collapsed() {
|
||||
div { class: "sidebar-footer", "v0.1.0" }
|
||||
{
|
||||
let auth_info = use_context::<Signal<AuthInfo>>();
|
||||
let info = auth_info();
|
||||
let initials = info.name.chars().next().unwrap_or('U').to_uppercase().to_string();
|
||||
let user_class = if collapsed() { "sidebar-user sidebar-user-collapsed" } else { "sidebar-user" };
|
||||
rsx! {
|
||||
div { class: "{user_class}",
|
||||
div { class: "user-avatar",
|
||||
if info.avatar_url.is_empty() {
|
||||
span { class: "avatar-initials", "{initials}" }
|
||||
} else {
|
||||
img { src: "{info.avatar_url}", alt: "avatar", class: "avatar-img" }
|
||||
}
|
||||
}
|
||||
if !collapsed() {
|
||||
span { class: "user-name", "{info.name}" }
|
||||
}
|
||||
a {
|
||||
href: "/logout",
|
||||
class: if collapsed() { "logout-btn logout-btn-collapsed" } else { "logout-btn" },
|
||||
title: "Sign out",
|
||||
Icon { icon: BsBoxArrowRight, width: 16, height: 16 }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
234
compliance-dashboard/src/infrastructure/auth.rs
Normal file
234
compliance-dashboard/src/infrastructure/auth.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use axum::{
|
||||
extract::Query,
|
||||
response::{IntoResponse, Redirect},
|
||||
Extension,
|
||||
};
|
||||
use rand::Rng;
|
||||
use tower_sessions::Session;
|
||||
use url::Url;
|
||||
|
||||
use super::{
|
||||
error::DashboardError,
|
||||
server_state::ServerState,
|
||||
user_state::{User, UserStateInner},
|
||||
};
|
||||
|
||||
pub const LOGGED_IN_USER_SESS_KEY: &str = "logged-in-user";
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct PendingOAuthEntry {
|
||||
pub(crate) redirect_url: Option<String>,
|
||||
pub(crate) code_verifier: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct PendingOAuthStore(Arc<RwLock<HashMap<String, PendingOAuthEntry>>>);
|
||||
|
||||
impl PendingOAuthStore {
|
||||
pub(crate) fn insert(&self, state: String, entry: PendingOAuthEntry) {
|
||||
#[allow(clippy::expect_used)]
|
||||
self.0
|
||||
.write()
|
||||
.expect("pending oauth store lock poisoned")
|
||||
.insert(state, entry);
|
||||
}
|
||||
|
||||
pub(crate) fn take(&self, state: &str) -> Option<PendingOAuthEntry> {
|
||||
#[allow(clippy::expect_used)]
|
||||
self.0
|
||||
.write()
|
||||
.expect("pending oauth store lock poisoned")
|
||||
.remove(state)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn generate_state() -> String {
|
||||
let bytes: [u8; 32] = rand::rng().random();
|
||||
bytes.iter().fold(String::with_capacity(64), |mut acc, b| {
|
||||
use std::fmt::Write;
|
||||
let _ = write!(acc, "{b:02x}");
|
||||
acc
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn generate_code_verifier() -> String {
|
||||
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
|
||||
let bytes: [u8; 32] = rand::rng().random();
|
||||
URL_SAFE_NO_PAD.encode(bytes)
|
||||
}
|
||||
|
||||
pub(crate) fn derive_code_challenge(verifier: &str) -> String {
|
||||
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
|
||||
use sha2::{Digest, Sha256};
|
||||
let digest = Sha256::digest(verifier.as_bytes());
|
||||
URL_SAFE_NO_PAD.encode(digest)
|
||||
}
|
||||
|
||||
#[axum::debug_handler]
|
||||
pub async fn auth_login(
|
||||
Extension(state): Extension<ServerState>,
|
||||
Extension(pending): Extension<PendingOAuthStore>,
|
||||
Query(params): Query<HashMap<String, String>>,
|
||||
) -> Result<impl IntoResponse, DashboardError> {
|
||||
let kc = state
|
||||
.keycloak
|
||||
.ok_or(DashboardError::Other("Keycloak not configured".into()))?;
|
||||
let csrf_state = generate_state();
|
||||
let code_verifier = generate_code_verifier();
|
||||
let code_challenge = derive_code_challenge(&code_verifier);
|
||||
|
||||
let redirect_url = params.get("redirect_url").cloned();
|
||||
pending.insert(
|
||||
csrf_state.clone(),
|
||||
PendingOAuthEntry {
|
||||
redirect_url,
|
||||
code_verifier,
|
||||
},
|
||||
);
|
||||
|
||||
let mut url = Url::parse(&kc.auth_endpoint())
|
||||
.map_err(|e| DashboardError::Other(format!("invalid auth endpoint URL: {e}")))?;
|
||||
|
||||
url.query_pairs_mut()
|
||||
.append_pair("client_id", &kc.client_id)
|
||||
.append_pair("redirect_uri", &kc.redirect_uri)
|
||||
.append_pair("response_type", "code")
|
||||
.append_pair("scope", "openid profile email")
|
||||
.append_pair("state", &csrf_state)
|
||||
.append_pair("code_challenge", &code_challenge)
|
||||
.append_pair("code_challenge_method", "S256");
|
||||
|
||||
Ok(Redirect::temporary(url.as_str()))
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct TokenResponse {
|
||||
access_token: String,
|
||||
refresh_token: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct UserinfoResponse {
|
||||
sub: String,
|
||||
email: Option<String>,
|
||||
preferred_username: Option<String>,
|
||||
name: Option<String>,
|
||||
picture: Option<String>,
|
||||
}
|
||||
|
||||
#[axum::debug_handler]
|
||||
pub async fn auth_callback(
|
||||
session: Session,
|
||||
Extension(state): Extension<ServerState>,
|
||||
Extension(pending): Extension<PendingOAuthStore>,
|
||||
Query(params): Query<HashMap<String, String>>,
|
||||
) -> Result<impl IntoResponse, DashboardError> {
|
||||
let kc = state
|
||||
.keycloak
|
||||
.ok_or(DashboardError::Other("Keycloak not configured".into()))?;
|
||||
|
||||
let returned_state = params
|
||||
.get("state")
|
||||
.ok_or_else(|| DashboardError::Other("missing state parameter".into()))?;
|
||||
|
||||
let entry = pending
|
||||
.take(returned_state)
|
||||
.ok_or_else(|| DashboardError::Other("unknown or expired oauth state".into()))?;
|
||||
|
||||
let code = params
|
||||
.get("code")
|
||||
.ok_or_else(|| DashboardError::Other("missing code parameter".into()))?;
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
let token_resp = client
|
||||
.post(kc.token_endpoint())
|
||||
.form(&[
|
||||
("grant_type", "authorization_code"),
|
||||
("client_id", kc.client_id.as_str()),
|
||||
("redirect_uri", kc.redirect_uri.as_str()),
|
||||
("code", code),
|
||||
("code_verifier", &entry.code_verifier),
|
||||
])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("token request failed: {e}")))?;
|
||||
|
||||
if !token_resp.status().is_success() {
|
||||
let body = token_resp.text().await.unwrap_or_default();
|
||||
return Err(DashboardError::Other(format!(
|
||||
"token exchange failed: {body}"
|
||||
)));
|
||||
}
|
||||
|
||||
let tokens: TokenResponse = token_resp
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("token parse failed: {e}")))?;
|
||||
|
||||
let userinfo: UserinfoResponse = client
|
||||
.get(kc.userinfo_endpoint())
|
||||
.bearer_auth(&tokens.access_token)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("userinfo request failed: {e}")))?
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("userinfo parse failed: {e}")))?;
|
||||
|
||||
let display_name = userinfo
|
||||
.name
|
||||
.or(userinfo.preferred_username)
|
||||
.unwrap_or_default();
|
||||
|
||||
let user_state = UserStateInner {
|
||||
sub: userinfo.sub,
|
||||
access_token: tokens.access_token,
|
||||
refresh_token: tokens.refresh_token.unwrap_or_default(),
|
||||
user: User {
|
||||
email: userinfo.email.unwrap_or_default(),
|
||||
name: display_name,
|
||||
avatar_url: userinfo.picture.unwrap_or_default(),
|
||||
},
|
||||
};
|
||||
|
||||
session
|
||||
.insert(LOGGED_IN_USER_SESS_KEY, user_state)
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("session insert failed: {e}")))?;
|
||||
|
||||
let target = entry
|
||||
.redirect_url
|
||||
.filter(|u| !u.is_empty())
|
||||
.unwrap_or_else(|| "/".into());
|
||||
|
||||
Ok(Redirect::temporary(&target))
|
||||
}
|
||||
|
||||
#[axum::debug_handler]
|
||||
pub async fn logout(
|
||||
session: Session,
|
||||
Extension(state): Extension<ServerState>,
|
||||
) -> Result<impl IntoResponse, DashboardError> {
|
||||
let kc = state
|
||||
.keycloak
|
||||
.ok_or(DashboardError::Other("Keycloak not configured".into()))?;
|
||||
|
||||
session
|
||||
.flush()
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("session flush failed: {e}")))?;
|
||||
|
||||
let mut url = Url::parse(&kc.logout_endpoint())
|
||||
.map_err(|e| DashboardError::Other(format!("invalid logout endpoint URL: {e}")))?;
|
||||
|
||||
url.query_pairs_mut()
|
||||
.append_pair("client_id", &kc.client_id)
|
||||
.append_pair("post_logout_redirect_uri", &kc.app_url);
|
||||
|
||||
Ok(Redirect::temporary(url.as_str()))
|
||||
}
|
||||
44
compliance-dashboard/src/infrastructure/auth_check.rs
Normal file
44
compliance-dashboard/src/infrastructure/auth_check.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use compliance_core::models::auth::AuthInfo;
|
||||
use dioxus::prelude::*;
|
||||
|
||||
/// Check the current user's authentication state.
|
||||
///
|
||||
/// Reads the tower-sessions session on the server and returns an
|
||||
/// [`AuthInfo`] describing the logged-in user. When no valid session
|
||||
/// exists, `authenticated` is `false` and all other fields are empty.
|
||||
#[server(endpoint = "check-auth")]
|
||||
pub async fn check_auth() -> Result<AuthInfo, ServerFnError> {
|
||||
use super::auth::LOGGED_IN_USER_SESS_KEY;
|
||||
use super::server_state::ServerState;
|
||||
use super::user_state::UserStateInner;
|
||||
use dioxus_fullstack::FullstackContext;
|
||||
|
||||
let state: ServerState = FullstackContext::extract().await?;
|
||||
|
||||
// When Keycloak is not configured, treat as always authenticated
|
||||
if state.keycloak.is_none() {
|
||||
return Ok(AuthInfo {
|
||||
authenticated: true,
|
||||
name: "Local User".into(),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
let session: tower_sessions::Session = FullstackContext::extract().await?;
|
||||
|
||||
let user_state: Option<UserStateInner> = session
|
||||
.get(LOGGED_IN_USER_SESS_KEY)
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(format!("session read failed: {e}")))?;
|
||||
|
||||
match user_state {
|
||||
Some(u) => Ok(AuthInfo {
|
||||
authenticated: true,
|
||||
sub: u.sub,
|
||||
email: u.user.email,
|
||||
name: u.user.name,
|
||||
avatar_url: u.user.avatar_url,
|
||||
}),
|
||||
None => Ok(AuthInfo::default()),
|
||||
}
|
||||
}
|
||||
45
compliance-dashboard/src/infrastructure/auth_middleware.rs
Normal file
45
compliance-dashboard/src/infrastructure/auth_middleware.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use axum::{
|
||||
extract::Request,
|
||||
middleware::Next,
|
||||
response::{IntoResponse, Response},
|
||||
Extension,
|
||||
};
|
||||
use reqwest::StatusCode;
|
||||
use tower_sessions::Session;
|
||||
|
||||
use super::auth::LOGGED_IN_USER_SESS_KEY;
|
||||
use super::server_state::ServerState;
|
||||
use super::user_state::UserStateInner;
|
||||
|
||||
const PUBLIC_API_ENDPOINTS: &[&str] = &["/api/check-auth"];
|
||||
|
||||
/// Axum middleware that enforces authentication on `/api/` server
|
||||
/// function endpoints. Skips auth entirely when Keycloak is not configured.
|
||||
pub async fn require_auth(
|
||||
Extension(state): Extension<ServerState>,
|
||||
session: Session,
|
||||
request: Request,
|
||||
next: Next,
|
||||
) -> Response {
|
||||
// Skip auth when Keycloak is not configured
|
||||
if state.keycloak.is_none() {
|
||||
return next.run(request).await;
|
||||
}
|
||||
|
||||
let path = request.uri().path();
|
||||
|
||||
if path.starts_with("/api/") && !PUBLIC_API_ENDPOINTS.contains(&path) {
|
||||
let is_authed = session
|
||||
.get::<UserStateInner>(LOGGED_IN_USER_SESS_KEY)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.is_some();
|
||||
|
||||
if !is_authed {
|
||||
return (StatusCode::UNAUTHORIZED, "Authentication required").into_response();
|
||||
}
|
||||
}
|
||||
|
||||
next.run(request).await
|
||||
}
|
||||
@@ -42,4 +42,8 @@ impl Database {
|
||||
pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
|
||||
self.inner.collection("tracker_issues")
|
||||
}
|
||||
|
||||
pub fn mcp_servers(&self) -> Collection<McpServerConfig> {
|
||||
self.inner.collection("mcp_servers")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,3 +24,14 @@ impl From<DashboardError> for ServerFnError {
|
||||
ServerFnError::new(err.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
impl axum::response::IntoResponse for DashboardError {
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
(
|
||||
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
|
||||
self.to_string(),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
|
||||
54
compliance-dashboard/src/infrastructure/keycloak_config.rs
Normal file
54
compliance-dashboard/src/infrastructure/keycloak_config.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
/// Keycloak OpenID Connect settings.
|
||||
#[derive(Debug)]
|
||||
pub struct KeycloakConfig {
|
||||
pub url: String,
|
||||
pub realm: String,
|
||||
pub client_id: String,
|
||||
pub redirect_uri: String,
|
||||
pub app_url: String,
|
||||
}
|
||||
|
||||
impl KeycloakConfig {
|
||||
pub fn from_env() -> Option<Self> {
|
||||
let url = std::env::var("KEYCLOAK_URL").ok()?;
|
||||
let realm = std::env::var("KEYCLOAK_REALM").ok()?;
|
||||
let client_id = std::env::var("KEYCLOAK_CLIENT_ID").ok()?;
|
||||
let redirect_uri = std::env::var("REDIRECT_URI").ok()?;
|
||||
let app_url = std::env::var("APP_URL").ok()?;
|
||||
Some(Self {
|
||||
url,
|
||||
realm,
|
||||
client_id,
|
||||
redirect_uri,
|
||||
app_url,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn auth_endpoint(&self) -> String {
|
||||
format!(
|
||||
"{}/realms/{}/protocol/openid-connect/auth",
|
||||
self.url, self.realm
|
||||
)
|
||||
}
|
||||
|
||||
pub fn token_endpoint(&self) -> String {
|
||||
format!(
|
||||
"{}/realms/{}/protocol/openid-connect/token",
|
||||
self.url, self.realm
|
||||
)
|
||||
}
|
||||
|
||||
pub fn userinfo_endpoint(&self) -> String {
|
||||
format!(
|
||||
"{}/realms/{}/protocol/openid-connect/userinfo",
|
||||
self.url, self.realm
|
||||
)
|
||||
}
|
||||
|
||||
pub fn logout_endpoint(&self) -> String {
|
||||
format!(
|
||||
"{}/realms/{}/protocol/openid-connect/logout",
|
||||
self.url, self.realm
|
||||
)
|
||||
}
|
||||
}
|
||||
160
compliance-dashboard/src/infrastructure/mcp.rs
Normal file
160
compliance-dashboard/src/infrastructure/mcp.rs
Normal file
@@ -0,0 +1,160 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::McpServerConfig;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct McpServersResponse {
|
||||
pub data: Vec<McpServerConfig>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_mcp_servers() -> Result<McpServersResponse, ServerFnError> {
|
||||
use mongodb::bson::doc;
|
||||
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
|
||||
let mut cursor = state
|
||||
.db
|
||||
.mcp_servers()
|
||||
.find(doc! {})
|
||||
.sort(doc! { "created_at": -1 })
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
let mut data = Vec::new();
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?
|
||||
{
|
||||
let server = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
data.push(server);
|
||||
}
|
||||
|
||||
Ok(McpServersResponse { data })
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn add_mcp_server(
|
||||
name: String,
|
||||
endpoint_url: String,
|
||||
transport: String,
|
||||
port: String,
|
||||
description: String,
|
||||
mongodb_uri: String,
|
||||
mongodb_database: String,
|
||||
) -> Result<(), ServerFnError> {
|
||||
use chrono::Utc;
|
||||
use compliance_core::models::{McpServerStatus, McpTransport};
|
||||
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
|
||||
let transport = match transport.as_str() {
|
||||
"http" => McpTransport::Http,
|
||||
_ => McpTransport::Stdio,
|
||||
};
|
||||
|
||||
let port_num: Option<u16> = port.parse().ok();
|
||||
|
||||
// Generate a random access token
|
||||
let token = format!("mcp_{}", uuid::Uuid::new_v4().to_string().replace('-', ""));
|
||||
|
||||
let all_tools = vec![
|
||||
"list_findings".to_string(),
|
||||
"get_finding".to_string(),
|
||||
"findings_summary".to_string(),
|
||||
"list_sbom_packages".to_string(),
|
||||
"sbom_vuln_report".to_string(),
|
||||
"list_dast_findings".to_string(),
|
||||
"dast_scan_summary".to_string(),
|
||||
];
|
||||
|
||||
let now = Utc::now();
|
||||
let server = McpServerConfig {
|
||||
id: None,
|
||||
name,
|
||||
endpoint_url,
|
||||
transport,
|
||||
port: port_num,
|
||||
status: McpServerStatus::Stopped,
|
||||
access_token: token,
|
||||
tools_enabled: all_tools,
|
||||
description: if description.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(description)
|
||||
},
|
||||
mongodb_uri: if mongodb_uri.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(mongodb_uri)
|
||||
},
|
||||
mongodb_database: if mongodb_database.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(mongodb_database)
|
||||
},
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
};
|
||||
|
||||
state
|
||||
.db
|
||||
.mcp_servers()
|
||||
.insert_one(server)
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn delete_mcp_server(server_id: String) -> Result<(), ServerFnError> {
|
||||
use mongodb::bson::doc;
|
||||
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
|
||||
let oid = bson::oid::ObjectId::parse_str(&server_id)
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
state
|
||||
.db
|
||||
.mcp_servers()
|
||||
.delete_one(doc! { "_id": oid })
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn regenerate_mcp_token(server_id: String) -> Result<String, ServerFnError> {
|
||||
use chrono::Utc;
|
||||
use mongodb::bson::doc;
|
||||
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
|
||||
let oid = bson::oid::ObjectId::parse_str(&server_id)
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
let new_token = format!("mcp_{}", uuid::Uuid::new_v4().to_string().replace('-', ""));
|
||||
|
||||
state
|
||||
.db
|
||||
.mcp_servers()
|
||||
.update_one(
|
||||
doc! { "_id": oid },
|
||||
doc! { "$set": { "access_token": &new_token, "updated_at": Utc::now().to_rfc3339() } },
|
||||
)
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
Ok(new_token)
|
||||
}
|
||||
@@ -1,10 +1,12 @@
|
||||
// Server function modules (compiled for both web and server;
|
||||
// the #[server] macro generates client stubs for the web target)
|
||||
pub mod auth_check;
|
||||
pub mod chat;
|
||||
pub mod dast;
|
||||
pub mod findings;
|
||||
pub mod graph;
|
||||
pub mod issues;
|
||||
pub mod mcp;
|
||||
pub mod repositories;
|
||||
pub mod sbom;
|
||||
pub mod scans;
|
||||
@@ -12,15 +14,27 @@ pub mod stats;
|
||||
|
||||
// Server-only modules
|
||||
#[cfg(feature = "server")]
|
||||
mod auth;
|
||||
#[cfg(feature = "server")]
|
||||
mod auth_middleware;
|
||||
#[cfg(feature = "server")]
|
||||
pub mod config;
|
||||
#[cfg(feature = "server")]
|
||||
pub mod database;
|
||||
#[cfg(feature = "server")]
|
||||
pub mod error;
|
||||
#[cfg(feature = "server")]
|
||||
pub mod server;
|
||||
pub mod keycloak_config;
|
||||
#[cfg(feature = "server")]
|
||||
mod server;
|
||||
#[cfg(feature = "server")]
|
||||
pub mod server_state;
|
||||
#[cfg(feature = "server")]
|
||||
mod user_state;
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
pub use auth::{auth_callback, auth_login, logout, PendingOAuthStore};
|
||||
#[cfg(feature = "server")]
|
||||
pub use auth_middleware::require_auth;
|
||||
#[cfg(feature = "server")]
|
||||
pub use server::server_start;
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
use axum::routing::get;
|
||||
use axum::{middleware, Extension};
|
||||
use dioxus::prelude::*;
|
||||
use time::Duration;
|
||||
use tower_sessions::{cookie::Key, MemoryStore, SessionManagerLayer};
|
||||
|
||||
use super::config;
|
||||
use super::database::Database;
|
||||
use super::error::DashboardError;
|
||||
use super::keycloak_config::KeycloakConfig;
|
||||
use super::server_state::{ServerState, ServerStateInner};
|
||||
use super::{auth_callback, auth_login, logout, require_auth, PendingOAuthStore};
|
||||
|
||||
pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
|
||||
tokio::runtime::Runtime::new()
|
||||
@@ -12,16 +18,35 @@ pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
|
||||
dotenvy::dotenv().ok();
|
||||
|
||||
let config = config::load_config()?;
|
||||
let keycloak: Option<&'static KeycloakConfig> =
|
||||
KeycloakConfig::from_env().map(|kc| &*Box::leak(Box::new(kc)));
|
||||
let db = Database::connect(&config.mongodb_uri, &config.mongodb_database).await?;
|
||||
|
||||
if let Some(kc) = keycloak {
|
||||
tracing::info!("Keycloak configured for realm '{}'", kc.realm);
|
||||
} else {
|
||||
tracing::warn!("Keycloak not configured - dashboard is unprotected");
|
||||
}
|
||||
|
||||
let server_state: ServerState = ServerStateInner {
|
||||
agent_api_url: config.agent_api_url.clone(),
|
||||
db,
|
||||
config,
|
||||
keycloak,
|
||||
}
|
||||
.into();
|
||||
|
||||
let addr = dioxus_cli_config::fullstack_address_or_localhost();
|
||||
// Session layer
|
||||
let key = Key::generate();
|
||||
let store = MemoryStore::default();
|
||||
let session = SessionManagerLayer::new(store)
|
||||
.with_secure(false)
|
||||
.with_same_site(tower_sessions::cookie::SameSite::Lax)
|
||||
.with_expiry(tower_sessions::Expiry::OnInactivity(Duration::hours(24)))
|
||||
.with_signed(key);
|
||||
|
||||
let port = dioxus_cli_config::server_port().unwrap_or(8080);
|
||||
let addr = std::net::SocketAddr::from(([0, 0, 0, 0], port));
|
||||
let listener = tokio::net::TcpListener::bind(addr)
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("Failed to bind: {e}")))?;
|
||||
@@ -29,8 +54,14 @@ pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
|
||||
tracing::info!("Dashboard server listening on {addr}");
|
||||
|
||||
let router = axum::Router::new()
|
||||
.route("/auth", get(auth_login))
|
||||
.route("/auth/callback", get(auth_callback))
|
||||
.route("/logout", get(logout))
|
||||
.serve_dioxus_application(ServeConfig::new(), app)
|
||||
.layer(axum::Extension(server_state));
|
||||
.layer(Extension(PendingOAuthStore::default()))
|
||||
.layer(middleware::from_fn(require_auth))
|
||||
.layer(Extension(server_state))
|
||||
.layer(session);
|
||||
|
||||
axum::serve(listener, router.into_make_service())
|
||||
.await
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::sync::Arc;
|
||||
use compliance_core::DashboardConfig;
|
||||
|
||||
use super::database::Database;
|
||||
use super::keycloak_config::KeycloakConfig;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ServerState(Arc<ServerStateInner>);
|
||||
@@ -19,6 +20,7 @@ pub struct ServerStateInner {
|
||||
pub db: Database,
|
||||
pub config: DashboardConfig,
|
||||
pub agent_api_url: String,
|
||||
pub keycloak: Option<&'static KeycloakConfig>,
|
||||
}
|
||||
|
||||
impl From<ServerStateInner> for ServerState {
|
||||
|
||||
18
compliance-dashboard/src/infrastructure/user_state.rs
Normal file
18
compliance-dashboard/src/infrastructure/user_state.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Per-session user data stored in the tower-sessions session store.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct UserStateInner {
|
||||
pub sub: String,
|
||||
pub access_token: String,
|
||||
pub refresh_token: String,
|
||||
pub user: User,
|
||||
}
|
||||
|
||||
/// Basic user profile stored alongside the session.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct User {
|
||||
pub email: String,
|
||||
pub name: String,
|
||||
pub avatar_url: String,
|
||||
}
|
||||
328
compliance-dashboard/src/pages/mcp_servers.rs
Normal file
328
compliance-dashboard/src/pages/mcp_servers.rs
Normal file
@@ -0,0 +1,328 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::toast::{ToastType, Toasts};
|
||||
use crate::infrastructure::mcp::{
|
||||
add_mcp_server, delete_mcp_server, fetch_mcp_servers, regenerate_mcp_token,
|
||||
};
|
||||
|
||||
#[component]
|
||||
pub fn McpServersPage() -> Element {
|
||||
let mut servers = use_resource(|| async { fetch_mcp_servers().await.ok() });
|
||||
let mut toasts = use_context::<Toasts>();
|
||||
|
||||
let mut show_form = use_signal(|| false);
|
||||
let mut new_name = use_signal(String::new);
|
||||
let mut new_endpoint = use_signal(String::new);
|
||||
let mut new_transport = use_signal(|| "http".to_string());
|
||||
let mut new_port = use_signal(|| "8090".to_string());
|
||||
let mut new_description = use_signal(String::new);
|
||||
let mut new_mongo_uri = use_signal(String::new);
|
||||
let mut new_mongo_db = use_signal(String::new);
|
||||
|
||||
// Track which server's token is visible
|
||||
let mut visible_token: Signal<Option<String>> = use_signal(|| None);
|
||||
// Track which server is pending delete confirmation
|
||||
let mut confirm_delete: Signal<Option<(String, String)>> = use_signal(|| None);
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "MCP Servers",
|
||||
description: "Manage Model Context Protocol servers for LLM integrations",
|
||||
}
|
||||
|
||||
div { class: "mb-4",
|
||||
button {
|
||||
class: "btn btn-primary",
|
||||
onclick: move |_| show_form.set(!show_form()),
|
||||
if show_form() { "Cancel" } else { "Register Server" }
|
||||
}
|
||||
}
|
||||
|
||||
if show_form() {
|
||||
div { class: "card mb-4",
|
||||
div { class: "card-header", "Register MCP Server" }
|
||||
div { class: "mcp-form-grid",
|
||||
div { class: "form-group",
|
||||
label { "Name" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "Production MCP",
|
||||
value: "{new_name}",
|
||||
oninput: move |e| new_name.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Endpoint URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "https://mcp.example.com/mcp",
|
||||
value: "{new_endpoint}",
|
||||
oninput: move |e| new_endpoint.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Transport" }
|
||||
select {
|
||||
value: "{new_transport}",
|
||||
oninput: move |e| new_transport.set(e.value()),
|
||||
option { value: "http", "HTTP (Streamable)" }
|
||||
option { value: "stdio", "Stdio" }
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Port" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "8090",
|
||||
value: "{new_port}",
|
||||
oninput: move |e| new_port.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "MongoDB URI" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "mongodb://localhost:27017",
|
||||
value: "{new_mongo_uri}",
|
||||
oninput: move |e| new_mongo_uri.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Database Name" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "compliance_scanner",
|
||||
value: "{new_mongo_db}",
|
||||
oninput: move |e| new_mongo_db.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Description" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "Optional notes about this server",
|
||||
value: "{new_description}",
|
||||
oninput: move |e| new_description.set(e.value()),
|
||||
}
|
||||
}
|
||||
button {
|
||||
class: "btn btn-primary",
|
||||
onclick: move |_| {
|
||||
let name = new_name();
|
||||
let endpoint = new_endpoint();
|
||||
let transport = new_transport();
|
||||
let port = new_port();
|
||||
let desc = new_description();
|
||||
let mongo_uri = new_mongo_uri();
|
||||
let mongo_db = new_mongo_db();
|
||||
spawn(async move {
|
||||
match add_mcp_server(name, endpoint, transport, port, desc, mongo_uri, mongo_db).await {
|
||||
Ok(_) => {
|
||||
toasts.push(ToastType::Success, "MCP server registered");
|
||||
servers.restart();
|
||||
}
|
||||
Err(e) => toasts.push(ToastType::Error, e.to_string()),
|
||||
}
|
||||
});
|
||||
show_form.set(false);
|
||||
new_name.set(String::new());
|
||||
new_endpoint.set(String::new());
|
||||
new_transport.set("http".to_string());
|
||||
new_port.set("8090".to_string());
|
||||
new_description.set(String::new());
|
||||
new_mongo_uri.set(String::new());
|
||||
new_mongo_db.set(String::new());
|
||||
},
|
||||
"Register"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete confirmation modal
|
||||
if let Some((ref del_id, ref del_name)) = *confirm_delete.read() {
|
||||
div { class: "modal-overlay",
|
||||
onclick: move |_| confirm_delete.set(None),
|
||||
div { class: "modal-dialog",
|
||||
onclick: move |e| e.stop_propagation(),
|
||||
h3 { "Delete MCP Server" }
|
||||
p { "Are you sure you want to remove " strong { "{del_name}" } "?" }
|
||||
p { class: "text-secondary", "Connected LLM clients will lose access." }
|
||||
div { class: "modal-actions",
|
||||
button {
|
||||
class: "btn btn-ghost",
|
||||
onclick: move |_| confirm_delete.set(None),
|
||||
"Cancel"
|
||||
}
|
||||
button {
|
||||
class: "btn btn-danger",
|
||||
onclick: {
|
||||
let id = del_id.clone();
|
||||
move |_| {
|
||||
let id = id.clone();
|
||||
spawn(async move {
|
||||
match delete_mcp_server(id).await {
|
||||
Ok(_) => {
|
||||
toasts.push(ToastType::Success, "Server removed");
|
||||
servers.restart();
|
||||
}
|
||||
Err(e) => toasts.push(ToastType::Error, e.to_string()),
|
||||
}
|
||||
});
|
||||
confirm_delete.set(None);
|
||||
}
|
||||
},
|
||||
"Delete"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match &*servers.read() {
|
||||
Some(Some(resp)) => {
|
||||
if resp.data.is_empty() {
|
||||
rsx! {
|
||||
div { class: "card",
|
||||
p { class: "text-secondary", "No MCP servers registered. Add one to get started." }
|
||||
}
|
||||
}
|
||||
} else {
|
||||
rsx! {
|
||||
for server in resp.data.iter() {
|
||||
{
|
||||
let sid = server.id.map(|id| id.to_hex()).unwrap_or_default();
|
||||
let name = server.name.clone();
|
||||
let status_class = match server.status {
|
||||
compliance_core::models::McpServerStatus::Running => "mcp-status-running",
|
||||
compliance_core::models::McpServerStatus::Stopped => "mcp-status-stopped",
|
||||
compliance_core::models::McpServerStatus::Error => "mcp-status-error",
|
||||
};
|
||||
let is_token_visible = visible_token().as_deref() == Some(sid.as_str());
|
||||
let created_str = server.created_at.format("%Y-%m-%d %H:%M").to_string();
|
||||
|
||||
rsx! {
|
||||
div { class: "card mcp-server-card mb-4",
|
||||
div { class: "mcp-server-header",
|
||||
div { class: "mcp-server-title",
|
||||
h3 { "{server.name}" }
|
||||
span { class: "mcp-status {status_class}",
|
||||
"{server.status}"
|
||||
}
|
||||
}
|
||||
div { class: "mcp-server-actions",
|
||||
button {
|
||||
class: "btn btn-sm btn-ghost",
|
||||
title: "Delete server",
|
||||
onclick: {
|
||||
let id = sid.clone();
|
||||
let name = name.clone();
|
||||
move |_| {
|
||||
confirm_delete.set(Some((id.clone(), name.clone())));
|
||||
}
|
||||
},
|
||||
"Delete"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref desc) = server.description {
|
||||
p { class: "text-secondary mb-3", "{desc}" }
|
||||
}
|
||||
|
||||
div { class: "mcp-config-grid",
|
||||
div { class: "mcp-config-item",
|
||||
span { class: "mcp-config-label", "Endpoint" }
|
||||
code { class: "mcp-config-value", "{server.endpoint_url}" }
|
||||
}
|
||||
div { class: "mcp-config-item",
|
||||
span { class: "mcp-config-label", "Transport" }
|
||||
span { class: "mcp-config-value", "{server.transport}" }
|
||||
}
|
||||
if let Some(port) = server.port {
|
||||
div { class: "mcp-config-item",
|
||||
span { class: "mcp-config-label", "Port" }
|
||||
span { class: "mcp-config-value", "{port}" }
|
||||
}
|
||||
}
|
||||
if let Some(ref db) = server.mongodb_database {
|
||||
div { class: "mcp-config-item",
|
||||
span { class: "mcp-config-label", "Database" }
|
||||
span { class: "mcp-config-value", "{db}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "mcp-tools-section",
|
||||
span { class: "mcp-config-label", "Enabled Tools" }
|
||||
div { class: "mcp-tools-list",
|
||||
for tool in server.tools_enabled.iter() {
|
||||
span { class: "mcp-tool-badge", "{tool}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "mcp-token-section",
|
||||
span { class: "mcp-config-label", "Access Token" }
|
||||
div { class: "mcp-token-row",
|
||||
code { class: "mcp-token-value",
|
||||
if is_token_visible {
|
||||
"{server.access_token}"
|
||||
} else {
|
||||
"mcp_••••••••••••••••••••••••••••"
|
||||
}
|
||||
}
|
||||
button {
|
||||
class: "btn btn-sm btn-ghost",
|
||||
onclick: {
|
||||
let id = sid.clone();
|
||||
move |_| {
|
||||
if visible_token().as_deref() == Some(id.as_str()) {
|
||||
visible_token.set(None);
|
||||
} else {
|
||||
visible_token.set(Some(id.clone()));
|
||||
}
|
||||
}
|
||||
},
|
||||
if is_token_visible { "Hide" } else { "Reveal" }
|
||||
}
|
||||
button {
|
||||
class: "btn btn-sm btn-ghost",
|
||||
onclick: {
|
||||
let id = sid.clone();
|
||||
move |_| {
|
||||
let id = id.clone();
|
||||
spawn(async move {
|
||||
match regenerate_mcp_token(id).await {
|
||||
Ok(_) => {
|
||||
toasts.push(ToastType::Success, "Token regenerated");
|
||||
servers.restart();
|
||||
}
|
||||
Err(e) => toasts.push(ToastType::Error, e.to_string()),
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
"Regenerate"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "mcp-meta",
|
||||
span { class: "text-secondary",
|
||||
"Created {created_str}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! { div { class: "card", p { "Failed to load MCP servers." } } },
|
||||
None => rsx! { div { class: "card", p { "Loading..." } } },
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,7 @@ pub mod graph_explorer;
|
||||
pub mod graph_index;
|
||||
pub mod impact_analysis;
|
||||
pub mod issues;
|
||||
pub mod mcp_servers;
|
||||
pub mod overview;
|
||||
pub mod repositories;
|
||||
pub mod sbom;
|
||||
@@ -27,6 +28,7 @@ pub use graph_explorer::GraphExplorerPage;
|
||||
pub use graph_index::GraphIndexPage;
|
||||
pub use impact_analysis::ImpactAnalysisPage;
|
||||
pub use issues::IssuesPage;
|
||||
pub use mcp_servers::McpServersPage;
|
||||
pub use overview::OverviewPage;
|
||||
pub use repositories::RepositoriesPage;
|
||||
pub use sbom::SbomPage;
|
||||
|
||||
21
compliance-mcp/Cargo.toml
Normal file
21
compliance-mcp/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "compliance-mcp"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
compliance-core = { workspace = true, features = ["mongodb"] }
|
||||
rmcp = { version = "0.16", features = ["server", "macros", "transport-io", "transport-streamable-http-server"] }
|
||||
tokio = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
mongodb = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
dotenvy = "0.15"
|
||||
thiserror = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
bson = { version = "2", features = ["chrono-0_4"] }
|
||||
schemars = "1.0"
|
||||
axum = "0.8"
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
34
compliance-mcp/src/database.rs
Normal file
34
compliance-mcp/src/database.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use mongodb::{Client, Collection};
|
||||
|
||||
use compliance_core::models::*;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Database {
|
||||
inner: mongodb::Database,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn connect(uri: &str, db_name: &str) -> Result<Self, mongodb::error::Error> {
|
||||
let client = Client::with_uri_str(uri).await?;
|
||||
let db = client.database(db_name);
|
||||
db.run_command(mongodb::bson::doc! { "ping": 1 }).await?;
|
||||
tracing::info!("MCP server connected to MongoDB '{db_name}'");
|
||||
Ok(Self { inner: db })
|
||||
}
|
||||
|
||||
pub fn findings(&self) -> Collection<Finding> {
|
||||
self.inner.collection("findings")
|
||||
}
|
||||
|
||||
pub fn sbom_entries(&self) -> Collection<SbomEntry> {
|
||||
self.inner.collection("sbom_entries")
|
||||
}
|
||||
|
||||
pub fn dast_findings(&self) -> Collection<DastFinding> {
|
||||
self.inner.collection("dast_findings")
|
||||
}
|
||||
|
||||
pub fn dast_scan_runs(&self) -> Collection<DastScanRun> {
|
||||
self.inner.collection("dast_scan_runs")
|
||||
}
|
||||
}
|
||||
58
compliance-mcp/src/main.rs
Normal file
58
compliance-mcp/src/main.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
mod database;
|
||||
mod server;
|
||||
mod tools;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use database::Database;
|
||||
use rmcp::transport::{
|
||||
streamable_http_server::session::local::LocalSessionManager, StreamableHttpServerConfig,
|
||||
StreamableHttpService,
|
||||
};
|
||||
use server::ComplianceMcpServer;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let _ = dotenvy::dotenv();
|
||||
|
||||
tracing_subscriber::fmt()
|
||||
.with_env_filter(
|
||||
tracing_subscriber::EnvFilter::from_default_env()
|
||||
.add_directive("compliance_mcp=info".parse()?),
|
||||
)
|
||||
.init();
|
||||
|
||||
let mongo_uri =
|
||||
std::env::var("MONGODB_URI").unwrap_or_else(|_| "mongodb://localhost:27017".to_string());
|
||||
let db_name =
|
||||
std::env::var("MONGODB_DATABASE").unwrap_or_else(|_| "compliance_scanner".to_string());
|
||||
|
||||
let db = Database::connect(&mongo_uri, &db_name).await?;
|
||||
|
||||
// If MCP_PORT is set, run as Streamable HTTP server; otherwise use stdio.
|
||||
if let Ok(port_str) = std::env::var("MCP_PORT") {
|
||||
let port: u16 = port_str.parse()?;
|
||||
tracing::info!("Starting MCP server on HTTP port {port}");
|
||||
|
||||
let db_clone = db.clone();
|
||||
let service = StreamableHttpService::new(
|
||||
move || Ok(ComplianceMcpServer::new(db_clone.clone())),
|
||||
Arc::new(LocalSessionManager::default()),
|
||||
StreamableHttpServerConfig::default(),
|
||||
);
|
||||
|
||||
let router = axum::Router::new().nest_service("/mcp", service);
|
||||
let listener = tokio::net::TcpListener::bind(("0.0.0.0", port)).await?;
|
||||
tracing::info!("MCP HTTP server listening on 0.0.0.0:{port}");
|
||||
axum::serve(listener, router).await?;
|
||||
} else {
|
||||
tracing::info!("Starting MCP server on stdio");
|
||||
let server = ComplianceMcpServer::new(db);
|
||||
let transport = rmcp::transport::stdio();
|
||||
use rmcp::ServiceExt;
|
||||
let handle = server.serve(transport).await?;
|
||||
handle.waiting().await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
109
compliance-mcp/src/server.rs
Normal file
109
compliance-mcp/src/server.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
use rmcp::{
|
||||
handler::server::wrapper::Parameters, model::*, tool, tool_handler, tool_router, ServerHandler,
|
||||
};
|
||||
|
||||
use crate::database::Database;
|
||||
use crate::tools::{dast, findings, sbom};
|
||||
|
||||
pub struct ComplianceMcpServer {
|
||||
db: Database,
|
||||
#[allow(dead_code)]
|
||||
tool_router: rmcp::handler::server::router::tool::ToolRouter<Self>,
|
||||
}
|
||||
|
||||
#[tool_router]
|
||||
impl ComplianceMcpServer {
|
||||
pub fn new(db: Database) -> Self {
|
||||
Self {
|
||||
db,
|
||||
tool_router: Self::tool_router(),
|
||||
}
|
||||
}
|
||||
|
||||
// ── Findings ──────────────────────────────────────────
|
||||
|
||||
#[tool(
|
||||
description = "List security findings with optional filters for repo, severity, status, and scan type"
|
||||
)]
|
||||
async fn list_findings(
|
||||
&self,
|
||||
Parameters(params): Parameters<findings::ListFindingsParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
findings::list_findings(&self.db, params).await
|
||||
}
|
||||
|
||||
#[tool(description = "Get a single finding by its ID")]
|
||||
async fn get_finding(
|
||||
&self,
|
||||
Parameters(params): Parameters<findings::GetFindingParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
findings::get_finding(&self.db, params).await
|
||||
}
|
||||
|
||||
#[tool(description = "Get a summary of findings counts grouped by severity and status")]
|
||||
async fn findings_summary(
|
||||
&self,
|
||||
Parameters(params): Parameters<findings::FindingsSummaryParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
findings::findings_summary(&self.db, params).await
|
||||
}
|
||||
|
||||
// ── SBOM ──────────────────────────────────────────────
|
||||
|
||||
#[tool(
|
||||
description = "List SBOM packages with optional filters for repo, vulnerabilities, package manager, and license"
|
||||
)]
|
||||
async fn list_sbom_packages(
|
||||
&self,
|
||||
Parameters(params): Parameters<sbom::ListSbomPackagesParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
sbom::list_sbom_packages(&self.db, params).await
|
||||
}
|
||||
|
||||
#[tool(
|
||||
description = "Generate a vulnerability report for a repository showing all packages with known CVEs"
|
||||
)]
|
||||
async fn sbom_vuln_report(
|
||||
&self,
|
||||
Parameters(params): Parameters<sbom::SbomVulnReportParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
sbom::sbom_vuln_report(&self.db, params).await
|
||||
}
|
||||
|
||||
// ── DAST ──────────────────────────────────────────────
|
||||
|
||||
#[tool(
|
||||
description = "List DAST findings with optional filters for target, scan run, severity, exploitability, and vulnerability type"
|
||||
)]
|
||||
async fn list_dast_findings(
|
||||
&self,
|
||||
Parameters(params): Parameters<dast::ListDastFindingsParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
dast::list_dast_findings(&self.db, params).await
|
||||
}
|
||||
|
||||
#[tool(description = "Get a summary of recent DAST scan runs and finding counts")]
|
||||
async fn dast_scan_summary(
|
||||
&self,
|
||||
Parameters(params): Parameters<dast::DastScanSummaryParams>,
|
||||
) -> Result<CallToolResult, rmcp::ErrorData> {
|
||||
dast::dast_scan_summary(&self.db, params).await
|
||||
}
|
||||
}
|
||||
|
||||
#[tool_handler]
|
||||
impl ServerHandler for ComplianceMcpServer {
|
||||
fn get_info(&self) -> ServerInfo {
|
||||
ServerInfo {
|
||||
protocol_version: ProtocolVersion::V_2024_11_05,
|
||||
capabilities: ServerCapabilities::builder()
|
||||
.enable_tools()
|
||||
.build(),
|
||||
server_info: Implementation::from_build_env(),
|
||||
instructions: Some(
|
||||
"Compliance Scanner MCP server. Query security findings, SBOM data, and DAST results."
|
||||
.to_string(),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
154
compliance-mcp/src/tools/dast.rs
Normal file
154
compliance-mcp/src/tools/dast.rs
Normal file
@@ -0,0 +1,154 @@
|
||||
use mongodb::bson::doc;
|
||||
use rmcp::{model::*, ErrorData as McpError};
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::database::Database;
|
||||
|
||||
const MAX_LIMIT: i64 = 200;
|
||||
const DEFAULT_LIMIT: i64 = 50;
|
||||
|
||||
fn cap_limit(limit: Option<i64>) -> i64 {
|
||||
limit.unwrap_or(DEFAULT_LIMIT).clamp(1, MAX_LIMIT)
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct ListDastFindingsParams {
|
||||
/// Filter by DAST target ID
|
||||
pub target_id: Option<String>,
|
||||
/// Filter by scan run ID
|
||||
pub scan_run_id: Option<String>,
|
||||
/// Filter by severity: info, low, medium, high, critical
|
||||
pub severity: Option<String>,
|
||||
/// Only show confirmed exploitable findings
|
||||
pub exploitable: Option<bool>,
|
||||
/// Filter by vulnerability type (e.g. sql_injection, xss, ssrf)
|
||||
pub vuln_type: Option<String>,
|
||||
/// Maximum number of results (default 50, max 200)
|
||||
pub limit: Option<i64>,
|
||||
}
|
||||
|
||||
pub async fn list_dast_findings(
|
||||
db: &Database,
|
||||
params: ListDastFindingsParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let mut filter = doc! {};
|
||||
if let Some(ref target_id) = params.target_id {
|
||||
filter.insert("target_id", target_id);
|
||||
}
|
||||
if let Some(ref scan_run_id) = params.scan_run_id {
|
||||
filter.insert("scan_run_id", scan_run_id);
|
||||
}
|
||||
if let Some(ref severity) = params.severity {
|
||||
filter.insert("severity", severity);
|
||||
}
|
||||
if let Some(exploitable) = params.exploitable {
|
||||
filter.insert("exploitable", exploitable);
|
||||
}
|
||||
if let Some(ref vuln_type) = params.vuln_type {
|
||||
filter.insert("vuln_type", vuln_type);
|
||||
}
|
||||
|
||||
let limit = cap_limit(params.limit);
|
||||
|
||||
let mut cursor = db
|
||||
.dast_findings()
|
||||
.find(filter)
|
||||
.sort(doc! { "created_at": -1 })
|
||||
.limit(limit)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("cursor error: {e}"), None))?
|
||||
{
|
||||
let finding = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| McpError::internal_error(format!("deserialize error: {e}"), None))?;
|
||||
results.push(finding);
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct DastScanSummaryParams {
|
||||
/// Filter by DAST target ID
|
||||
pub target_id: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn dast_scan_summary(
|
||||
db: &Database,
|
||||
params: DastScanSummaryParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let mut filter = doc! {};
|
||||
if let Some(ref target_id) = params.target_id {
|
||||
filter.insert("target_id", target_id);
|
||||
}
|
||||
|
||||
// Get recent scan runs
|
||||
let mut cursor = db
|
||||
.dast_scan_runs()
|
||||
.find(filter.clone())
|
||||
.sort(doc! { "started_at": -1 })
|
||||
.limit(10)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut scan_runs = Vec::new();
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("cursor error: {e}"), None))?
|
||||
{
|
||||
let run = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| McpError::internal_error(format!("deserialize error: {e}"), None))?;
|
||||
scan_runs.push(serde_json::json!({
|
||||
"id": run.id.map(|id| id.to_hex()),
|
||||
"target_id": run.target_id,
|
||||
"status": run.status,
|
||||
"findings_count": run.findings_count,
|
||||
"exploitable_count": run.exploitable_count,
|
||||
"endpoints_discovered": run.endpoints_discovered,
|
||||
"started_at": run.started_at.to_rfc3339(),
|
||||
"completed_at": run.completed_at.map(|t| t.to_rfc3339()),
|
||||
}));
|
||||
}
|
||||
|
||||
// Count findings by severity
|
||||
let mut findings_filter = doc! {};
|
||||
if let Some(ref target_id) = params.target_id {
|
||||
findings_filter.insert("target_id", target_id);
|
||||
}
|
||||
let total_findings = db
|
||||
.dast_findings()
|
||||
.count_documents(findings_filter.clone())
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut exploitable_filter = findings_filter.clone();
|
||||
exploitable_filter.insert("exploitable", true);
|
||||
let exploitable_count = db
|
||||
.dast_findings()
|
||||
.count_documents(exploitable_filter)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let summary = serde_json::json!({
|
||||
"total_findings": total_findings,
|
||||
"exploitable_findings": exploitable_count,
|
||||
"recent_scan_runs": scan_runs,
|
||||
});
|
||||
|
||||
let json = serde_json::to_string_pretty(&summary)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
163
compliance-mcp/src/tools/findings.rs
Normal file
163
compliance-mcp/src/tools/findings.rs
Normal file
@@ -0,0 +1,163 @@
|
||||
use mongodb::bson::doc;
|
||||
use rmcp::{model::*, ErrorData as McpError};
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::database::Database;
|
||||
|
||||
const MAX_LIMIT: i64 = 200;
|
||||
const DEFAULT_LIMIT: i64 = 50;
|
||||
|
||||
fn cap_limit(limit: Option<i64>) -> i64 {
|
||||
limit.unwrap_or(DEFAULT_LIMIT).clamp(1, MAX_LIMIT)
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct ListFindingsParams {
|
||||
/// Filter by repository ID
|
||||
pub repo_id: Option<String>,
|
||||
/// Filter by severity: info, low, medium, high, critical
|
||||
pub severity: Option<String>,
|
||||
/// Filter by status: open, triaged, false_positive, resolved, ignored
|
||||
pub status: Option<String>,
|
||||
/// Filter by scan type: sast, sbom, cve, gdpr, oauth
|
||||
pub scan_type: Option<String>,
|
||||
/// Maximum number of results (default 50, max 200)
|
||||
pub limit: Option<i64>,
|
||||
}
|
||||
|
||||
pub async fn list_findings(
|
||||
db: &Database,
|
||||
params: ListFindingsParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let mut filter = doc! {};
|
||||
if let Some(ref repo_id) = params.repo_id {
|
||||
filter.insert("repo_id", repo_id);
|
||||
}
|
||||
if let Some(ref severity) = params.severity {
|
||||
filter.insert("severity", severity);
|
||||
}
|
||||
if let Some(ref status) = params.status {
|
||||
filter.insert("status", status);
|
||||
}
|
||||
if let Some(ref scan_type) = params.scan_type {
|
||||
filter.insert("scan_type", scan_type);
|
||||
}
|
||||
|
||||
let limit = cap_limit(params.limit);
|
||||
|
||||
let mut cursor = db
|
||||
.findings()
|
||||
.find(filter)
|
||||
.sort(doc! { "created_at": -1 })
|
||||
.limit(limit)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("cursor error: {e}"), None))?
|
||||
{
|
||||
let finding = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| McpError::internal_error(format!("deserialize error: {e}"), None))?;
|
||||
results.push(finding);
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct GetFindingParams {
|
||||
/// Finding ID (MongoDB ObjectId hex string)
|
||||
pub id: String,
|
||||
}
|
||||
|
||||
pub async fn get_finding(
|
||||
db: &Database,
|
||||
params: GetFindingParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let oid = bson::oid::ObjectId::parse_str(¶ms.id)
|
||||
.map_err(|e| McpError::invalid_params(format!("invalid ObjectId: {e}"), None))?;
|
||||
|
||||
let finding = db
|
||||
.findings()
|
||||
.find_one(doc! { "_id": oid })
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?
|
||||
.ok_or_else(|| McpError::invalid_params("finding not found", None))?;
|
||||
|
||||
let json = serde_json::to_string_pretty(&finding)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct FindingsSummaryParams {
|
||||
/// Filter by repository ID
|
||||
pub repo_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct SeverityCount {
|
||||
severity: String,
|
||||
count: u64,
|
||||
}
|
||||
|
||||
pub async fn findings_summary(
|
||||
db: &Database,
|
||||
params: FindingsSummaryParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let mut base_filter = doc! {};
|
||||
if let Some(ref repo_id) = params.repo_id {
|
||||
base_filter.insert("repo_id", repo_id);
|
||||
}
|
||||
|
||||
let severities = ["critical", "high", "medium", "low", "info"];
|
||||
let mut counts = Vec::new();
|
||||
|
||||
for sev in &severities {
|
||||
let mut filter = base_filter.clone();
|
||||
filter.insert("severity", sev);
|
||||
let count = db
|
||||
.findings()
|
||||
.count_documents(filter)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
counts.push(SeverityCount {
|
||||
severity: sev.to_string(),
|
||||
count,
|
||||
});
|
||||
}
|
||||
|
||||
let total: u64 = counts.iter().map(|c| c.count).sum();
|
||||
|
||||
let mut status_counts = Vec::new();
|
||||
for status in &["open", "triaged", "false_positive", "resolved", "ignored"] {
|
||||
let mut filter = base_filter.clone();
|
||||
filter.insert("status", status);
|
||||
let count = db
|
||||
.findings()
|
||||
.count_documents(filter)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
status_counts.push(serde_json::json!({ "status": status, "count": count }));
|
||||
}
|
||||
|
||||
let summary = serde_json::json!({
|
||||
"total": total,
|
||||
"by_severity": counts,
|
||||
"by_status": status_counts,
|
||||
});
|
||||
|
||||
let json = serde_json::to_string_pretty(&summary)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
3
compliance-mcp/src/tools/mod.rs
Normal file
3
compliance-mcp/src/tools/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod dast;
|
||||
pub mod findings;
|
||||
pub mod sbom;
|
||||
129
compliance-mcp/src/tools/sbom.rs
Normal file
129
compliance-mcp/src/tools/sbom.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use mongodb::bson::doc;
|
||||
use rmcp::{model::*, ErrorData as McpError};
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::database::Database;
|
||||
|
||||
const MAX_LIMIT: i64 = 200;
|
||||
const DEFAULT_LIMIT: i64 = 50;
|
||||
|
||||
fn cap_limit(limit: Option<i64>) -> i64 {
|
||||
limit.unwrap_or(DEFAULT_LIMIT).clamp(1, MAX_LIMIT)
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct ListSbomPackagesParams {
|
||||
/// Filter by repository ID
|
||||
pub repo_id: Option<String>,
|
||||
/// Only show packages with known vulnerabilities
|
||||
pub has_vulns: Option<bool>,
|
||||
/// Filter by package manager (e.g. npm, cargo, pip)
|
||||
pub package_manager: Option<String>,
|
||||
/// Filter by license (e.g. MIT, Apache-2.0)
|
||||
pub license: Option<String>,
|
||||
/// Maximum number of results (default 50, max 200)
|
||||
pub limit: Option<i64>,
|
||||
}
|
||||
|
||||
pub async fn list_sbom_packages(
|
||||
db: &Database,
|
||||
params: ListSbomPackagesParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let mut filter = doc! {};
|
||||
if let Some(ref repo_id) = params.repo_id {
|
||||
filter.insert("repo_id", repo_id);
|
||||
}
|
||||
if let Some(ref pm) = params.package_manager {
|
||||
filter.insert("package_manager", pm);
|
||||
}
|
||||
if let Some(ref license) = params.license {
|
||||
filter.insert("license", license);
|
||||
}
|
||||
if params.has_vulns == Some(true) {
|
||||
filter.insert("known_vulnerabilities.0", doc! { "$exists": true });
|
||||
}
|
||||
|
||||
let limit = cap_limit(params.limit);
|
||||
|
||||
let mut cursor = db
|
||||
.sbom_entries()
|
||||
.find(filter)
|
||||
.sort(doc! { "name": 1 })
|
||||
.limit(limit)
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("cursor error: {e}"), None))?
|
||||
{
|
||||
let entry = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| McpError::internal_error(format!("deserialize error: {e}"), None))?;
|
||||
results.push(entry);
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(&results)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct SbomVulnReportParams {
|
||||
/// Repository ID to generate vulnerability report for
|
||||
pub repo_id: String,
|
||||
}
|
||||
|
||||
pub async fn sbom_vuln_report(
|
||||
db: &Database,
|
||||
params: SbomVulnReportParams,
|
||||
) -> Result<CallToolResult, McpError> {
|
||||
let filter = doc! {
|
||||
"repo_id": ¶ms.repo_id,
|
||||
"known_vulnerabilities.0": { "$exists": true },
|
||||
};
|
||||
|
||||
let mut cursor = db
|
||||
.sbom_entries()
|
||||
.find(filter)
|
||||
.sort(doc! { "name": 1 })
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("DB error: {e}"), None))?;
|
||||
|
||||
let mut vulnerable_packages = Vec::new();
|
||||
let mut total_vulns = 0u64;
|
||||
|
||||
while cursor
|
||||
.advance()
|
||||
.await
|
||||
.map_err(|e| McpError::internal_error(format!("cursor error: {e}"), None))?
|
||||
{
|
||||
let entry = cursor
|
||||
.deserialize_current()
|
||||
.map_err(|e| McpError::internal_error(format!("deserialize error: {e}"), None))?;
|
||||
total_vulns += entry.known_vulnerabilities.len() as u64;
|
||||
vulnerable_packages.push(serde_json::json!({
|
||||
"name": entry.name,
|
||||
"version": entry.version,
|
||||
"package_manager": entry.package_manager,
|
||||
"license": entry.license,
|
||||
"vulnerabilities": entry.known_vulnerabilities,
|
||||
}));
|
||||
}
|
||||
|
||||
let report = serde_json::json!({
|
||||
"repo_id": params.repo_id,
|
||||
"vulnerable_packages_count": vulnerable_packages.len(),
|
||||
"total_vulnerabilities": total_vulns,
|
||||
"packages": vulnerable_packages,
|
||||
});
|
||||
|
||||
let json = serde_json::to_string_pretty(&report)
|
||||
.map_err(|e| McpError::internal_error(format!("json error: {e}"), None))?;
|
||||
|
||||
Ok(CallToolResult::success(vec![Content::text(json)]))
|
||||
}
|
||||
@@ -17,6 +17,9 @@ services:
|
||||
- "3001:3001"
|
||||
- "3002:3002"
|
||||
env_file: .env
|
||||
environment:
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317
|
||||
OTEL_SERVICE_NAME: compliance-agent
|
||||
depends_on:
|
||||
- mongo
|
||||
volumes:
|
||||
@@ -29,6 +32,9 @@ services:
|
||||
ports:
|
||||
- "8080:8080"
|
||||
env_file: .env
|
||||
environment:
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317
|
||||
OTEL_SERVICE_NAME: compliance-dashboard
|
||||
depends_on:
|
||||
- mongo
|
||||
- agent
|
||||
@@ -43,6 +49,15 @@ services:
|
||||
PREBOOT_CHROME: "true"
|
||||
restart: unless-stopped
|
||||
|
||||
otel-collector:
|
||||
image: otel/opentelemetry-collector-contrib:latest
|
||||
ports:
|
||||
- "4317:4317"
|
||||
- "4318:4318"
|
||||
volumes:
|
||||
- ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
mongo_data:
|
||||
repos_data:
|
||||
|
||||
3
docs/.gitignore
vendored
Normal file
3
docs/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
.vitepress/dist
|
||||
.vitepress/cache
|
||||
56
docs/.vitepress/config.mts
Normal file
56
docs/.vitepress/config.mts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { defineConfig } from 'vitepress'
|
||||
|
||||
export default defineConfig({
|
||||
title: 'Compliance Scanner',
|
||||
description: 'AI-powered security compliance scanning platform',
|
||||
ignoreDeadLinks: [
|
||||
/localhost/,
|
||||
],
|
||||
themeConfig: {
|
||||
nav: [
|
||||
{ text: 'Guide', link: '/guide/getting-started' },
|
||||
{ text: 'Features', link: '/features/overview' },
|
||||
{ text: 'Deployment', link: '/deployment/docker' },
|
||||
],
|
||||
sidebar: [
|
||||
{
|
||||
text: 'Guide',
|
||||
items: [
|
||||
{ text: 'Getting Started', link: '/guide/getting-started' },
|
||||
{ text: 'Adding Repositories', link: '/guide/repositories' },
|
||||
{ text: 'Running Scans', link: '/guide/scanning' },
|
||||
{ text: 'Managing Findings', link: '/guide/findings' },
|
||||
{ text: 'Configuration', link: '/guide/configuration' },
|
||||
],
|
||||
},
|
||||
{
|
||||
text: 'Features',
|
||||
items: [
|
||||
{ text: 'Dashboard Overview', link: '/features/overview' },
|
||||
{ text: 'SBOM & License Compliance', link: '/features/sbom' },
|
||||
{ text: 'Code Knowledge Graph', link: '/features/graph' },
|
||||
{ text: 'Impact Analysis', link: '/features/impact-analysis' },
|
||||
{ text: 'DAST Scanning', link: '/features/dast' },
|
||||
{ text: 'AI Chat (RAG)', link: '/features/ai-chat' },
|
||||
{ text: 'Issue Tracker Integration', link: '/features/issues' },
|
||||
{ text: 'MCP Server', link: '/features/mcp-server' },
|
||||
],
|
||||
},
|
||||
{
|
||||
text: 'Deployment',
|
||||
items: [
|
||||
{ text: 'Docker Compose', link: '/deployment/docker' },
|
||||
{ text: 'Environment Variables', link: '/deployment/environment' },
|
||||
{ text: 'Keycloak Authentication', link: '/deployment/keycloak' },
|
||||
{ text: 'OpenTelemetry', link: '/deployment/opentelemetry' },
|
||||
],
|
||||
},
|
||||
],
|
||||
socialLinks: [
|
||||
{ icon: 'github', link: 'https://gitea.meghsakha.com/sharang/compliance-scanner-agent' },
|
||||
],
|
||||
footer: {
|
||||
message: 'Compliance Scanner Documentation',
|
||||
},
|
||||
},
|
||||
})
|
||||
125
docs/deployment/docker.md
Normal file
125
docs/deployment/docker.md
Normal file
@@ -0,0 +1,125 @@
|
||||
# Docker Compose Deployment
|
||||
|
||||
The recommended way to deploy Compliance Scanner is with Docker Compose.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Docker and Docker Compose installed
|
||||
- At least 4 GB of available RAM
|
||||
- Git repository access (tokens configured in `.env`)
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repo-url> compliance-scanner
|
||||
cd compliance-scanner
|
||||
|
||||
# Configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env with your MongoDB credentials, tokens, etc.
|
||||
|
||||
# Start all services
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
## Services
|
||||
|
||||
The `docker-compose.yml` includes these services:
|
||||
|
||||
| Service | Port | Description |
|
||||
|---------|------|-------------|
|
||||
| `mongo` | 27017 | MongoDB database |
|
||||
| `agent` | 3001, 3002 | Compliance agent (REST API + webhooks) |
|
||||
| `dashboard` | 8080 | Web dashboard |
|
||||
| `chromium` | 3003 | Headless browser for DAST crawling |
|
||||
| `otel-collector` | 4317, 4318 | OpenTelemetry collector (optional) |
|
||||
|
||||
## Volumes
|
||||
|
||||
| Volume | Purpose |
|
||||
|--------|---------|
|
||||
| `mongo_data` | Persistent MongoDB data |
|
||||
| `repos_data` | Cloned repository files |
|
||||
|
||||
## Checking Status
|
||||
|
||||
```bash
|
||||
# View running services
|
||||
docker-compose ps
|
||||
|
||||
# View logs
|
||||
docker-compose logs -f agent
|
||||
docker-compose logs -f dashboard
|
||||
|
||||
# Restart a service
|
||||
docker-compose restart agent
|
||||
```
|
||||
|
||||
## Accessing the Dashboard
|
||||
|
||||
Once running, open [http://localhost:8080](http://localhost:8080) in your browser.
|
||||
|
||||
If Keycloak authentication is configured, you'll be redirected to sign in. Otherwise, the dashboard is accessible directly.
|
||||
|
||||
## Updating
|
||||
|
||||
```bash
|
||||
# Pull latest changes
|
||||
git pull
|
||||
|
||||
# Rebuild and restart
|
||||
docker-compose up -d --build
|
||||
```
|
||||
|
||||
## Production Considerations
|
||||
|
||||
### MongoDB
|
||||
|
||||
For production, use a managed MongoDB instance or configure replication:
|
||||
|
||||
```bash
|
||||
MONGODB_URI=mongodb+srv://user:pass@cluster.mongodb.net/compliance_scanner
|
||||
```
|
||||
|
||||
### Reverse Proxy
|
||||
|
||||
Place the dashboard behind a reverse proxy (nginx, Caddy, Traefik) with TLS:
|
||||
|
||||
```nginx
|
||||
server {
|
||||
listen 443 ssl;
|
||||
server_name compliance.example.com;
|
||||
|
||||
ssl_certificate /path/to/cert.pem;
|
||||
ssl_certificate_key /path/to/key.pem;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:8080;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Resource Limits
|
||||
|
||||
Add resource limits to Docker Compose for production:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
agent:
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 2G
|
||||
cpus: '2.0'
|
||||
dashboard:
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: '1.0'
|
||||
```
|
||||
93
docs/deployment/environment.md
Normal file
93
docs/deployment/environment.md
Normal file
@@ -0,0 +1,93 @@
|
||||
# Environment Variables
|
||||
|
||||
Complete reference for all environment variables. See [Configuration](/guide/configuration) for detailed descriptions of each variable.
|
||||
|
||||
## Required
|
||||
|
||||
```bash
|
||||
# MongoDB connection
|
||||
MONGODB_URI=mongodb://root:example@localhost:27017/compliance_scanner?authSource=admin
|
||||
```
|
||||
|
||||
## Agent
|
||||
|
||||
```bash
|
||||
AGENT_PORT=3001
|
||||
SCAN_SCHEDULE=0 0 */6 * * *
|
||||
CVE_MONITOR_SCHEDULE=0 0 0 * * *
|
||||
GIT_CLONE_BASE_PATH=/tmp/compliance-scanner/repos
|
||||
MONGODB_DATABASE=compliance_scanner
|
||||
```
|
||||
|
||||
## Dashboard
|
||||
|
||||
```bash
|
||||
DASHBOARD_PORT=8080
|
||||
AGENT_API_URL=http://localhost:3001
|
||||
```
|
||||
|
||||
## LLM / AI
|
||||
|
||||
```bash
|
||||
LITELLM_URL=http://localhost:4000
|
||||
LITELLM_API_KEY=
|
||||
LITELLM_MODEL=gpt-4o
|
||||
LITELLM_EMBED_MODEL=text-embedding-3-small
|
||||
```
|
||||
|
||||
## Git Providers
|
||||
|
||||
```bash
|
||||
# GitHub
|
||||
GITHUB_TOKEN=
|
||||
GITHUB_WEBHOOK_SECRET=
|
||||
|
||||
# GitLab
|
||||
GITLAB_URL=https://gitlab.com
|
||||
GITLAB_TOKEN=
|
||||
GITLAB_WEBHOOK_SECRET=
|
||||
```
|
||||
|
||||
## Issue Trackers
|
||||
|
||||
```bash
|
||||
# Jira
|
||||
JIRA_URL=
|
||||
JIRA_EMAIL=
|
||||
JIRA_API_TOKEN=
|
||||
JIRA_PROJECT_KEY=
|
||||
```
|
||||
|
||||
## External Services
|
||||
|
||||
```bash
|
||||
SEARXNG_URL=http://localhost:8888
|
||||
NVD_API_KEY=
|
||||
```
|
||||
|
||||
## Authentication
|
||||
|
||||
```bash
|
||||
KEYCLOAK_URL=http://localhost:8080
|
||||
KEYCLOAK_REALM=compliance
|
||||
KEYCLOAK_CLIENT_ID=compliance-dashboard
|
||||
REDIRECT_URI=http://localhost:8080/auth/callback
|
||||
APP_URL=http://localhost:8080
|
||||
```
|
||||
|
||||
## MCP Server
|
||||
|
||||
```bash
|
||||
MONGODB_URI=mongodb://root:example@localhost:27017/compliance_scanner?authSource=admin
|
||||
MONGODB_DATABASE=compliance_scanner
|
||||
# Set to enable HTTP transport (omit for stdio)
|
||||
MCP_PORT=8090
|
||||
```
|
||||
|
||||
## Observability
|
||||
|
||||
```bash
|
||||
# Set to enable OpenTelemetry export (omit to disable)
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
|
||||
OTEL_SERVICE_NAME=compliance-agent
|
||||
```
|
||||
104
docs/deployment/keycloak.md
Normal file
104
docs/deployment/keycloak.md
Normal file
@@ -0,0 +1,104 @@
|
||||
# Keycloak Authentication
|
||||
|
||||
Compliance Scanner supports Keycloak for SSO authentication. When configured, all dashboard access requires signing in through Keycloak, and all API endpoints are protected.
|
||||
|
||||
## How It Works
|
||||
|
||||
### Dashboard (OAuth2/OIDC)
|
||||
|
||||
The dashboard implements a standard OAuth2 Authorization Code flow with PKCE:
|
||||
|
||||
1. User visits the dashboard
|
||||
2. If not authenticated, a login page shows with a "Sign in with Keycloak" button
|
||||
3. User is redirected to Keycloak's login page
|
||||
4. After authentication, Keycloak redirects back with an authorization code
|
||||
5. The dashboard exchanges the code for tokens and creates a session
|
||||
6. All subsequent `/api/` server function calls require a valid session
|
||||
|
||||
### Agent API (JWT)
|
||||
|
||||
The agent API validates JWT Bearer tokens from Keycloak:
|
||||
|
||||
1. Dashboard (or other clients) include the access token in requests: `Authorization: Bearer <token>`
|
||||
2. The agent fetches Keycloak's JWKS (JSON Web Key Set) to validate the token signature
|
||||
3. Token expiry and claims are verified
|
||||
4. The health endpoint (`/api/v1/health`) is always public
|
||||
|
||||
If `KEYCLOAK_URL` and `KEYCLOAK_REALM` are not set on the agent, JWT validation is disabled and all endpoints are open.
|
||||
|
||||
## Keycloak Setup
|
||||
|
||||
### 1. Create a Realm
|
||||
|
||||
In the Keycloak admin console:
|
||||
|
||||
1. Create a new realm (e.g. `compliance`)
|
||||
2. Note the realm name — you'll need it for `KEYCLOAK_REALM`
|
||||
|
||||
### 2. Create a Client
|
||||
|
||||
1. Go to **Clients** > **Create client**
|
||||
2. Set:
|
||||
- **Client ID**: `compliance-dashboard`
|
||||
- **Client type**: OpenID Connect
|
||||
- **Client authentication**: Off (public client)
|
||||
3. Under **Settings**:
|
||||
- **Valid redirect URIs**: `http://localhost:8080/auth/callback` (adjust for your domain)
|
||||
- **Valid post logout redirect URIs**: `http://localhost:8080`
|
||||
- **Web origins**: `http://localhost:8080`
|
||||
|
||||
### 3. Create Users
|
||||
|
||||
1. Go to **Users** > **Create user**
|
||||
2. Set username, email, first name, last name
|
||||
3. Under **Credentials**, set a password
|
||||
|
||||
## Environment Variables
|
||||
|
||||
```bash
|
||||
# Keycloak server URL (no trailing slash)
|
||||
KEYCLOAK_URL=http://localhost:8080
|
||||
|
||||
# Realm name
|
||||
KEYCLOAK_REALM=compliance
|
||||
|
||||
# Client ID (must match the client created above)
|
||||
KEYCLOAK_CLIENT_ID=compliance-dashboard
|
||||
|
||||
# OAuth callback URL (must match valid redirect URI in Keycloak)
|
||||
REDIRECT_URI=http://localhost:8080/auth/callback
|
||||
|
||||
# Application root URL (used for post-logout redirect)
|
||||
APP_URL=http://localhost:8080
|
||||
```
|
||||
|
||||
## Dashboard Features
|
||||
|
||||
When authenticated, the dashboard shows:
|
||||
|
||||
- **User avatar** in the sidebar (from Keycloak profile picture, or initials)
|
||||
- **User name** from Keycloak profile
|
||||
- **Logout** link that clears the session and redirects through Keycloak's logout flow
|
||||
|
||||
## Session Configuration
|
||||
|
||||
Sessions use signed cookies with these defaults:
|
||||
|
||||
- **Expiry**: 24 hours of inactivity
|
||||
- **SameSite**: Lax (required for Keycloak redirect flow)
|
||||
- **Secure**: Disabled by default (enable behind HTTPS)
|
||||
- **Storage**: In-memory (resets on server restart)
|
||||
|
||||
::: tip
|
||||
For production, consider persisting sessions to Redis or a database so they survive server restarts.
|
||||
:::
|
||||
|
||||
## Running Without Keycloak
|
||||
|
||||
If no Keycloak variables are set:
|
||||
|
||||
- The **dashboard** serves without authentication (all pages accessible)
|
||||
- The **agent API** accepts all requests without token validation
|
||||
- A warning is logged: `Keycloak not configured - API endpoints are unprotected`
|
||||
|
||||
This is suitable for local development and testing.
|
||||
139
docs/deployment/opentelemetry.md
Normal file
139
docs/deployment/opentelemetry.md
Normal file
@@ -0,0 +1,139 @@
|
||||
# OpenTelemetry Observability
|
||||
|
||||
Compliance Scanner exports traces and logs via OpenTelemetry Protocol (OTLP) for integration with observability platforms like SigNoz, Grafana (Tempo + Loki), Jaeger, and others.
|
||||
|
||||
## Enabling
|
||||
|
||||
Set the `OTEL_EXPORTER_OTLP_ENDPOINT` environment variable to enable OTLP export:
|
||||
|
||||
```bash
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
|
||||
```
|
||||
|
||||
When this variable is not set, telemetry export is disabled and only console logging is active.
|
||||
|
||||
## What Is Exported
|
||||
|
||||
### Traces
|
||||
|
||||
Distributed traces for:
|
||||
|
||||
- HTTP request handling (via `tower-http` `TraceLayer`)
|
||||
- Database operations
|
||||
- Scan pipeline phases
|
||||
- External API calls (LiteLLM, Keycloak, Git providers)
|
||||
|
||||
### Logs
|
||||
|
||||
All `tracing::info!`, `tracing::warn!`, `tracing::error!` log events are exported as OTel log records, including structured fields.
|
||||
|
||||
## Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `OTEL_EXPORTER_OTLP_ENDPOINT` | Collector gRPC endpoint | *(disabled)* |
|
||||
| `OTEL_SERVICE_NAME` | Service name in traces | `compliance-agent` or `compliance-dashboard` |
|
||||
| `RUST_LOG` | Log level filter | `info` |
|
||||
|
||||
## Docker Compose Setup
|
||||
|
||||
The included `docker-compose.yml` provides an OTel Collector service:
|
||||
|
||||
```yaml
otel-collector:
  image: otel/opentelemetry-collector-contrib:latest
  ports:
    - "4317:4317"  # gRPC
    - "4318:4318"  # HTTP
  volumes:
    - ./otel-collector-config.yaml:/etc/otelcol-contrib/config.yaml
```
|
||||
|
||||
The agent and dashboard are pre-configured to send telemetry to the collector:
|
||||
|
||||
```yaml
agent:
  environment:
    OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317
    OTEL_SERVICE_NAME: compliance-agent

dashboard:
  environment:
    OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4317
    OTEL_SERVICE_NAME: compliance-dashboard
```
|
||||
|
||||
## Collector Configuration
|
||||
|
||||
Edit `otel-collector-config.yaml` to configure your backend. The default configuration exports only to the `debug` (stdout) exporter.
|
||||
|
||||
### SigNoz
|
||||
|
||||
```yaml
exporters:
  otlp/signoz:
    endpoint: "signoz-otel-collector:4317"
    tls:
      insecure: true

service:
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [otlp/signoz]
    logs:
      receivers: [otlp]
      processors: [batch]
      exporters: [otlp/signoz]
```
|
||||
|
||||
### Grafana Tempo (Traces) + Loki (Logs)
|
||||
|
||||
```yaml
exporters:
  otlp/tempo:
    endpoint: "tempo:4317"
    tls:
      insecure: true
  loki:
    endpoint: "http://loki:3100/loki/api/v1/push"

service:
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [otlp/tempo]
    logs:
      receivers: [otlp]
      processors: [batch]
      exporters: [loki]
```
|
||||
|
||||
### Jaeger
|
||||
|
||||
```yaml
exporters:
  otlp/jaeger:
    endpoint: "jaeger:4317"
    tls:
      insecure: true

service:
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [otlp/jaeger]
```
|
||||
|
||||
## Verifying
|
||||
|
||||
After starting with telemetry enabled, look for this log on startup:
|
||||
|
||||
```
|
||||
OpenTelemetry OTLP export enabled endpoint=http://otel-collector:4317 service=compliance-agent
|
||||
```
|
||||
|
||||
If the endpoint is unreachable, the application still starts normally — telemetry export fails silently without affecting functionality.
|
||||
79
docs/features/ai-chat.md
Normal file
79
docs/features/ai-chat.md
Normal file
@@ -0,0 +1,79 @@
|
||||
# AI Chat (RAG)
|
||||
|
||||
The AI Chat feature lets you ask natural language questions about your codebase. It uses Retrieval-Augmented Generation (RAG) to find relevant code and provide accurate, source-referenced answers.
|
||||
|
||||
## How It Works
|
||||
|
||||
1. **Code graph** is built for the repository (functions, classes, modules)
|
||||
2. **Embeddings** are generated for each code symbol using an LLM embedding model
|
||||
3. When you ask a question, your query is **embedded** and compared against code embeddings
|
||||
4. The **top 8 most relevant** code snippets are retrieved
|
||||
5. These snippets are sent as context to the LLM along with your question
|
||||
6. The LLM generates a response **grounded in your actual code**
|
||||
|
||||
## Getting Started
|
||||
|
||||
### 1. Select a Repository
|
||||
|
||||
Navigate to **AI Chat** in the sidebar. You'll see a grid of repository cards. Click one to open the chat interface.
|
||||
|
||||
### 2. Build Embeddings
|
||||
|
||||
Before chatting, you need to build embeddings for the repository:
|
||||
|
||||
1. Click **Build Embeddings**
|
||||
2. Wait for the process to complete — a progress bar shows `X/Y chunks`
|
||||
3. Once the status shows **Embeddings ready**, the chat input is enabled
|
||||
|
||||
::: info
|
||||
Embedding builds require:
|
||||
- A code graph already built for the repository (via the Graph feature)
|
||||
- A configured embedding model (`LITELLM_EMBED_MODEL`)
|
||||
|
||||
The default model is `text-embedding-3-small`.
|
||||
:::
|
||||
|
||||
### 3. Ask Questions
|
||||
|
||||
Type your question in the input area and press Enter (or click Send). Examples:
|
||||
|
||||
- "How does authentication work in this codebase?"
|
||||
- "What functions handle database connections?"
|
||||
- "Explain the error handling pattern used in this project"
|
||||
- "Where are the API routes defined?"
|
||||
- "What does the `process_scan` function do?"
|
||||
|
||||
## Understanding Responses
|
||||
|
||||
### Answer
|
||||
|
||||
The AI response is a natural language answer to your question, grounded in the actual source code of your repository.
|
||||
|
||||
### Source References
|
||||
|
||||
Below each response, you'll see source references showing exactly which code was used to generate the answer:
|
||||
|
||||
- **Symbol name** — The qualified name of the function/class/module
|
||||
- **File path** — Where the code is located, with line range
|
||||
- **Code snippet** — The first ~10 lines of the relevant code
|
||||
- **Relevance score** — How closely the code matched your question (0.0 to 1.0)
|
||||
|
||||
## Conversation Context
|
||||
|
||||
The chat maintains conversation history within a session. You can ask follow-up questions that reference previous answers. The system sends the last 10 messages as context to maintain coherence.
|
||||
|
||||
## Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `LITELLM_URL` | LiteLLM proxy URL | `http://localhost:4000` |
|
||||
| `LITELLM_API_KEY` | API key for the LLM provider | — |
|
||||
| `LITELLM_MODEL` | Model for chat responses | `gpt-4o` |
|
||||
| `LITELLM_EMBED_MODEL` | Model for code embeddings | `text-embedding-3-small` |
|
||||
|
||||
## Tips
|
||||
|
||||
- **Be specific** — "How does the JWT validation middleware work?" is better than "Tell me about auth"
|
||||
- **Reference filenames** — "What does `server.rs` do?" helps the retrieval find relevant code
|
||||
- **Ask about patterns** — "What error handling pattern does this project use?" works well with RAG
|
||||
- **Rebuild after changes** — If the repository has been updated significantly, rebuild embeddings to include new code
|
||||
112
docs/features/dast.md
Normal file
112
docs/features/dast.md
Normal file
@@ -0,0 +1,112 @@
|
||||
# DAST Scanning
|
||||
|
||||
DAST (Dynamic Application Security Testing) performs black-box security testing against live web applications and APIs. Unlike SAST which analyzes source code, DAST tests running applications by sending crafted requests and analyzing responses.
|
||||
|
||||
## DAST Overview
|
||||
|
||||
Navigate to **DAST** in the sidebar to see the overview page with:
|
||||
|
||||
- Total DAST scans performed
|
||||
- Total DAST findings discovered
|
||||
- Number of active targets
|
||||
- Recent scan run history with status, phase, and finding counts
|
||||
|
||||
## Managing Targets
|
||||
|
||||
Navigate to **DAST > Targets** to configure applications to test.
|
||||
|
||||
### Adding a Target
|
||||
|
||||
1. Enter a **target name** (descriptive label)
|
||||
2. Enter the **base URL** (e.g. `https://staging.example.com`)
|
||||
3. Click **Add Target**
|
||||
|
||||
### Target Configuration
|
||||
|
||||
Each target supports these settings:
|
||||
|
||||
| Setting | Description | Default |
|
||||
|---------|-------------|---------|
|
||||
| **Target Type** | WebApp, REST API, or GraphQL | WebApp |
|
||||
| **Max Crawl Depth** | How many link levels to follow | 5 |
|
||||
| **Rate Limit** | Maximum requests per second | 10 |
|
||||
| **Destructive Tests** | Allow DELETE/PUT requests | No |
|
||||
| **Excluded Paths** | URL paths to skip during testing | — |
|
||||
|
||||
### Authentication
|
||||
|
||||
DAST supports authenticated scanning with multiple methods:
|
||||
|
||||
| Method | Configuration |
|
||||
|--------|--------------|
|
||||
| **None** | No authentication |
|
||||
| **Basic** | Username and password (HTTP Basic Auth) |
|
||||
| **Bearer** | Bearer token (Authorization header) |
|
||||
| **Cookie** | Session cookie value |
|
||||
| **Form** | Login URL, username field, password field, and credentials |
|
||||
|
||||
::: warning
|
||||
Authenticated scans access more of the application surface. Only test applications you own or have explicit authorization to test.
|
||||
:::
|
||||
|
||||
## Running a DAST Scan
|
||||
|
||||
Click the **Scan** button on any target row. The scan runs through these phases:
|
||||
|
||||
1. **Crawl** — Discovers pages, forms, and API endpoints by following links and analyzing JavaScript
|
||||
2. **Test** — Sends attack payloads to discovered parameters
|
||||
3. **Report** — Collects results and generates findings
|
||||
|
||||
The scan uses a headless Chromium browser (the `chromium` service in Docker Compose) for JavaScript rendering during crawling.
|
||||
|
||||
## DAST Scan Agents
|
||||
|
||||
The scanner includes specialized testing agents:
|
||||
|
||||
### API Fuzzer
|
||||
Tests API endpoints with malformed inputs, boundary values, and injection payloads.
|
||||
|
||||
### XSS Scanner
|
||||
Detects Cross-Site Scripting vulnerabilities by injecting script payloads into form fields, URL parameters, and headers.
|
||||
|
||||
### SSRF Scanner
|
||||
Tests for Server-Side Request Forgery by injecting internal URLs and cloud metadata endpoints into parameters.
|
||||
|
||||
### Auth Bypass Scanner
|
||||
Tests for authentication and authorization bypass by manipulating tokens, sessions, and access control headers.
|
||||
|
||||
## DAST Findings
|
||||
|
||||
Navigate to **DAST > Findings** to see all discovered vulnerabilities.
|
||||
|
||||
### Finding List
|
||||
|
||||
Each finding shows:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| Severity | Critical, High, Medium, or Low |
|
||||
| Type | Vulnerability category (SQL Injection, XSS, SSRF, etc.) |
|
||||
| Title | Description of the vulnerability |
|
||||
| Endpoint | The HTTP path that is vulnerable |
|
||||
| Method | HTTP method (GET, POST, PUT, DELETE) |
|
||||
| Exploitable | Whether the vulnerability was confirmed exploitable |
|
||||
|
||||
### Finding Detail
|
||||
|
||||
Click a finding to see full details:
|
||||
|
||||
- **Vulnerability type** and CWE identifier
|
||||
- **Endpoint URL** and HTTP method
|
||||
- **Parameter** that is vulnerable
|
||||
- **Exploitability** — Confirmed or Unconfirmed
|
||||
- **Description** — What the vulnerability is and why it matters
|
||||
- **Remediation** — How to fix the issue
|
||||
- **Evidence** — One or more request/response pairs showing:
|
||||
- The crafted HTTP request (method, URL, headers)
|
||||
- The payload that triggered the vulnerability
|
||||
- The HTTP response status and relevant snippet
|
||||
|
||||
::: tip
|
||||
Findings marked as **Confirmed** exploitable were verified by the scanner with a successful attack. **Unconfirmed** findings show suspicious behavior that may indicate a vulnerability but could not be fully exploited.
|
||||
:::
|
||||
92
docs/features/graph.md
Normal file
92
docs/features/graph.md
Normal file
@@ -0,0 +1,92 @@
|
||||
# Code Knowledge Graph
|
||||
|
||||
The Code Knowledge Graph feature parses your repository source code and builds an interactive graph of symbols (functions, classes, modules) and their relationships (calls, imports, inheritance).
|
||||
|
||||
## Graph Index
|
||||
|
||||
Navigate to **Code Graph** in the sidebar to see all repositories. Click a repository card to open its graph explorer.
|
||||
|
||||
## Building a Graph
|
||||
|
||||
Before exploring, you need to build the graph:
|
||||
|
||||
1. Open the graph explorer for a repository
|
||||
2. Click **Build Graph**
|
||||
3. The agent parses all source files and constructs the graph
|
||||
4. A spinner shows build progress
|
||||
|
||||
The graph builder supports these languages:
|
||||
- Rust
|
||||
- TypeScript
|
||||
- JavaScript
|
||||
- Python
|
||||
|
||||
## Graph Explorer
|
||||
|
||||
The graph explorer provides an interactive network visualization.
|
||||
|
||||
### Canvas
|
||||
|
||||
The main area renders an interactive network diagram using vis-network:
|
||||
|
||||
- **Nodes** represent code symbols (functions, classes, structs, enums, traits, modules, files)
|
||||
- **Edges** represent relationships between symbols
|
||||
- Nodes are **color-coded by community** — clusters of highly connected symbols detected using Louvain community detection
|
||||
- Pan by dragging the background, zoom with scroll wheel
|
||||
|
||||
### Node Types
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| Function | Standalone functions |
|
||||
| Method | Methods on classes/structs |
|
||||
| Class | Classes (TypeScript, Python) |
|
||||
| Struct | Structs (Rust) |
|
||||
| Enum | Enumerations |
|
||||
| Interface | Interfaces (TypeScript) |
|
||||
| Trait | Traits (Rust) |
|
||||
| Module | Modules and namespaces |
|
||||
| File | Source files |
|
||||
|
||||
### Edge Types
|
||||
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| Calls | Function/method invocation |
|
||||
| Imports | Module or symbol import |
|
||||
| Inherits | Class inheritance |
|
||||
| Implements | Interface/trait implementation |
|
||||
| Contains | Parent-child containment (module contains function) |
|
||||
| TypeRef | Type reference or usage |
|
||||
|
||||
### Statistics
|
||||
|
||||
The statistics panel shows:
|
||||
- Total node and edge count
|
||||
- Number of detected communities
|
||||
- Languages found in the repository
|
||||
- File tree of the codebase
|
||||
|
||||
### Search
|
||||
|
||||
Search for symbols by name:
|
||||
|
||||
1. Type at least 2 characters in the search box
|
||||
2. Matching symbols appear in a dropdown
|
||||
3. Click a result to highlight it on the canvas and open the inspector
|
||||
|
||||
### Code Inspector
|
||||
|
||||
When you click a node (on the canvas or from search), the inspector panel shows:
|
||||
|
||||
- **Symbol name** and kind (function, class, etc.)
|
||||
- **File path** with line range
|
||||
- **Source code** excerpt from the file
|
||||
- **Connected nodes** — what this symbol calls, what calls it, etc.
|
||||
|
||||
## Use Cases
|
||||
|
||||
- **Onboarding** — Understand unfamiliar codebase structure at a glance
|
||||
- **Architecture review** — Identify tightly coupled modules and circular dependencies
|
||||
- **Security** — Trace data flow from entry points to sensitive operations
|
||||
- **Refactoring** — See what depends on code you plan to change
|
||||
42
docs/features/impact-analysis.md
Normal file
42
docs/features/impact-analysis.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# Impact Analysis
|
||||
|
||||
Impact Analysis uses the Code Knowledge Graph to determine the blast radius of a security finding. When a vulnerability is found in a specific function or file, impact analysis traces the call graph to show everything that could be affected.
|
||||
|
||||
## Accessing Impact Analysis
|
||||
|
||||
Impact analysis is linked from the Graph Explorer. When viewing a repository's graph with findings, you can navigate to:
|
||||
|
||||
```
|
||||
/graph/{repo_id}/impact/{finding_id}
|
||||
```
|
||||
|
||||
## What You See
|
||||
|
||||
### Blast Radius
|
||||
|
||||
A count of the total number of code symbols (functions, methods, classes) affected by the vulnerability, both directly and transitively.
|
||||
|
||||
### Entry Points Affected
|
||||
|
||||
A list of **public entry points** — main functions, HTTP handlers, API endpoints — that could be impacted by the vulnerable code. These represent the ways an attacker could potentially reach the vulnerability.
|
||||
|
||||
### Call Chains
|
||||
|
||||
Complete call chain paths showing how execution flows from entry points through intermediate functions to the vulnerable code. Each chain shows the sequence of function calls.
|
||||
|
||||
### Direct Callers
|
||||
|
||||
The immediate functions that call the vulnerable function. These are the first layer of impact.
|
||||
|
||||
## How It Works
|
||||
|
||||
1. The finding's file path and line number are matched to a node in the code graph
|
||||
2. The graph is traversed **backwards** along call edges to find all callers
|
||||
3. Entry points (functions with no callers, or known patterns like `main`, HTTP handlers) are identified
|
||||
4. All paths from entry points to the vulnerable node are computed
|
||||
|
||||
## Use Cases
|
||||
|
||||
- **Prioritization** — A critical vulnerability in a function called by 50 entry points is more urgent than one in dead code
|
||||
- **Remediation scoping** — Understand what tests need to run after a fix
|
||||
- **Risk assessment** — Quantify the actual exposure of a vulnerability
|
||||
72
docs/features/issues.md
Normal file
72
docs/features/issues.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# Issue Tracker Integration
|
||||
|
||||
Compliance Scanner automatically creates issues in your existing issue trackers when new security findings are discovered. This integrates security into your development workflow without requiring teams to check a separate tool.
|
||||
|
||||
## Supported Trackers
|
||||
|
||||
| Tracker | Configuration Variables |
|
||||
|---------|----------------------|
|
||||
| **GitHub Issues** | `GITHUB_TOKEN` |
|
||||
| **GitLab Issues** | `GITLAB_URL`, `GITLAB_TOKEN` |
|
||||
| **Jira** | `JIRA_URL`, `JIRA_EMAIL`, `JIRA_API_TOKEN`, `JIRA_PROJECT_KEY` |
|
||||
|
||||
## How It Works
|
||||
|
||||
1. A scan discovers new findings
|
||||
2. For each new finding, the agent checks if an issue already exists (by fingerprint)
|
||||
3. If not, it creates an issue in the configured tracker with:
|
||||
- Title matching the finding title
|
||||
- Description with vulnerability details, severity, and file location
|
||||
- Link back to the finding in the dashboard
|
||||
4. The finding is updated with the external issue URL
|
||||
|
||||
## Viewing Issues
|
||||
|
||||
Navigate to **Issues** in the sidebar to see all tracker issues across your repositories.
|
||||
|
||||
The issues table shows:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| Tracker | Badge showing GitHub, GitLab, or Jira |
|
||||
| External ID | Issue number in the external system |
|
||||
| Title | Issue title |
|
||||
| Status | Open, Closed, or tracker-specific status |
|
||||
| Created | When the issue was created |
|
||||
| Link | Direct link to the issue in the external tracker |
|
||||
|
||||
Click the **Open** link to go directly to the issue in GitHub, GitLab, or Jira.
|
||||
|
||||
## Configuration
|
||||
|
||||
### GitHub
|
||||
|
||||
```bash
|
||||
GITHUB_TOKEN=ghp_xxxx
|
||||
```
|
||||
|
||||
Issues are created in the same repository that was scanned.
|
||||
|
||||
### GitLab
|
||||
|
||||
```bash
|
||||
GITLAB_URL=https://gitlab.com
|
||||
GITLAB_TOKEN=glpat-xxxx
|
||||
```
|
||||
|
||||
Issues are created in the same project that was scanned.
|
||||
|
||||
### Jira
|
||||
|
||||
```bash
|
||||
JIRA_URL=https://your-org.atlassian.net
|
||||
JIRA_EMAIL=security-bot@example.com
|
||||
JIRA_API_TOKEN=your-api-token
|
||||
JIRA_PROJECT_KEY=SEC
|
||||
```
|
||||
|
||||
All issues are created in the specified Jira project (`JIRA_PROJECT_KEY`).
|
||||
|
||||
::: tip
|
||||
Use a dedicated service account for issue creation so that security findings are clearly attributed to automated scanning rather than individual team members.
|
||||
:::
|
||||
155
docs/features/mcp-server.md
Normal file
155
docs/features/mcp-server.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# MCP Server
|
||||
|
||||
The Model Context Protocol (MCP) server exposes compliance data to external LLMs and AI agents. Any MCP-compatible client — such as Claude, Cursor, or a custom agent — can connect and query findings, SBOM data, and DAST results without direct database access.
|
||||
|
||||
## How It Works
|
||||
|
||||
The `compliance-mcp` crate runs as a standalone service that connects to the same MongoDB database as the agent and dashboard. It registers a set of **tools** that LLM clients can discover and call through the MCP protocol.
|
||||
|
||||
```
|
||||
LLM Client ──MCP──▶ compliance-mcp ──MongoDB──▶ compliance_scanner DB
|
||||
```
|
||||
|
||||
The server supports two transport modes:
|
||||
|
||||
| Transport | Use Case | How to Enable |
|
||||
|-----------|----------|---------------|
|
||||
| **Stdio** | Local development, piped to a CLI tool | Default (no `MCP_PORT` set) |
|
||||
| **Streamable HTTP** | Remote deployment, multiple clients | Set `MCP_PORT=8090` |
|
||||
|
||||
## Available Tools
|
||||
|
||||
The MCP server exposes seven tools:
|
||||
|
||||
### Findings
|
||||
|
||||
| Tool | Description |
|
||||
|------|-------------|
|
||||
| `list_findings` | Query findings with optional filters for repository, severity, status, and scan type. Returns up to 200 results (default 50). |
|
||||
| `get_finding` | Retrieve a single finding by its MongoDB ObjectId. |
|
||||
| `findings_summary` | Get finding counts grouped by severity and status, optionally filtered by repository. |
|
||||
|
||||
### SBOM
|
||||
|
||||
| Tool | Description |
|
||||
|------|-------------|
|
||||
| `list_sbom_packages` | List SBOM packages with filters for repository, vulnerabilities, package manager, and license. |
|
||||
| `sbom_vuln_report` | Generate a vulnerability report for a repository showing all packages with known CVEs. |
|
||||
|
||||
### DAST
|
||||
|
||||
| Tool | Description |
|
||||
|------|-------------|
|
||||
| `list_dast_findings` | Query DAST findings with filters for target, scan run, severity, exploitability, and vulnerability type. |
|
||||
| `dast_scan_summary` | Get a summary of recent DAST scan runs and finding counts. |
|
||||
|
||||
## Running Locally
|
||||
|
||||
### Stdio Mode
|
||||
|
||||
Run the MCP server directly — it reads from stdin and writes to stdout:
|
||||
|
||||
```bash
|
||||
cd compliance-mcp
|
||||
cargo run
|
||||
```
|
||||
|
||||
Configure your MCP client to launch it as a subprocess. For example, in a Claude Code `mcp.json`:
|
||||
|
||||
```json
{
  "mcpServers": {
    "compliance": {
      "command": "cargo",
      "args": ["run", "-p", "compliance-mcp"],
      "cwd": "/path/to/compliance-scanner"
    }
  }
}
```
|
||||
|
||||
### HTTP Mode
|
||||
|
||||
Set `MCP_PORT` to start the Streamable HTTP server:
|
||||
|
||||
```bash
|
||||
MCP_PORT=8090 cargo run -p compliance-mcp
|
||||
```
|
||||
|
||||
The server listens on `http://0.0.0.0:8090/mcp`. Point your MCP client to this endpoint.
|
||||
|
||||
## Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `MONGODB_URI` | MongoDB connection string | `mongodb://localhost:27017` |
|
||||
| `MONGODB_DATABASE` | Database name | `compliance_scanner` |
|
||||
| `MCP_PORT` | Port for HTTP transport (omit for stdio) | — |
|
||||
| `RUST_LOG` | Log level filter | `compliance_mcp=info` |
|
||||
|
||||
Create a `.env` file in the project root or set these as environment variables.
|
||||
|
||||
## Deploying with Docker
|
||||
|
||||
The `Dockerfile.mcp` builds and runs the MCP server in HTTP mode on port 8090.
|
||||
|
||||
```bash
|
||||
docker build -f Dockerfile.mcp -t compliance-mcp .
|
||||
docker run -p 8090:8090 \
|
||||
-e MONGODB_URI=mongodb://mongo:27017 \
|
||||
-e MONGODB_DATABASE=compliance_scanner \
|
||||
-e MCP_PORT=8090 \
|
||||
compliance-mcp
|
||||
```
|
||||
|
||||
### Coolify Deployment
|
||||
|
||||
1. Create a new service in your Coolify project
|
||||
2. Set the **Dockerfile path** to `Dockerfile.mcp`
|
||||
3. Set the **exposed port** to `8090`
|
||||
4. Add environment variables: `MONGODB_URI`, `MONGODB_DATABASE`, `MCP_PORT=8090`
|
||||
5. The MCP endpoint will be available at your configured domain under `/mcp`
|
||||
|
||||
The CI pipeline automatically deploys on changes to `compliance-core/`, `compliance-mcp/`, `Dockerfile.mcp`, or `Cargo.toml`/`Cargo.lock`. Add the `COOLIFY_WEBHOOK_MCP` secret to your Gitea repository.
|
||||
|
||||
## Managing MCP Servers in the Dashboard
|
||||
|
||||
Navigate to **MCP Servers** in the dashboard sidebar to:
|
||||
|
||||
- **Register** MCP server instances with their endpoint URL, transport type, port, and database connection
|
||||
- **View** server configuration, enabled tools, and status
|
||||
- **Manage access tokens** — reveal, copy, or regenerate bearer tokens for authentication
|
||||
- **Delete** servers that are no longer needed
|
||||
|
||||
Each registered server is assigned a random access token on creation. Use this token in your MCP client configuration for authenticated access.
|
||||
|
||||
## Example: Querying Findings from an LLM
|
||||
|
||||
Once connected, an LLM can call any of the registered tools. For example:
|
||||
|
||||
**"Show me all critical findings"** triggers `list_findings` with `severity: "critical"`:
|
||||
|
||||
```json
{
  "tool": "list_findings",
  "arguments": {
    "severity": "critical",
    "limit": 10
  }
}
```
|
||||
|
||||
**"What vulnerable packages does repo X have?"** triggers `sbom_vuln_report`:
|
||||
|
||||
```json
{
  "tool": "sbom_vuln_report",
  "arguments": {
    "repo_id": "683abc..."
  }
}
```
|
||||
|
||||
::: tip
|
||||
The MCP server is read-only — it only queries data from MongoDB. It cannot modify findings, trigger scans, or change configuration. This makes it safe to expose to external LLM clients.
|
||||
:::
|
||||
35
docs/features/overview.md
Normal file
35
docs/features/overview.md
Normal file
@@ -0,0 +1,35 @@
|
||||
# Dashboard Overview
|
||||
|
||||
The Overview page is the landing page of the Compliance Scanner dashboard. It gives you a high-level view of your security posture across all tracked repositories.
|
||||
|
||||
## Statistics
|
||||
|
||||
The top section displays key metrics:
|
||||
|
||||
| Metric | Description |
|
||||
|--------|-------------|
|
||||
| **Repositories** | Total number of tracked repositories |
|
||||
| **Total Findings** | Combined count of all security findings |
|
||||
| **Critical** | Findings with critical severity |
|
||||
| **High** | Findings with high severity |
|
||||
| **Medium** | Findings with medium severity |
|
||||
| **Low** | Findings with low severity |
|
||||
| **Dependencies** | Total SBOM entries across all repositories |
|
||||
| **CVE Alerts** | Active CVE alerts from dependency monitoring |
|
||||
| **Tracker Issues** | Issues created in external trackers (GitHub, GitLab, Jira) |
|
||||
|
||||
## Severity Distribution
|
||||
|
||||
A visual bar chart shows the distribution of findings by severity level, giving you an immediate sense of your risk profile.
|
||||
|
||||
## Recent Scan Runs
|
||||
|
||||
The bottom section lists the 10 most recent scan runs across all repositories, showing:
|
||||
|
||||
- Repository name
|
||||
- Scan status (queued, running, completed, failed)
|
||||
- Current phase
|
||||
- Number of findings discovered
|
||||
- Timestamp
|
||||
|
||||
This helps you monitor scanning activity and quickly spot failures.
|
||||
106
docs/features/sbom.md
Normal file
106
docs/features/sbom.md
Normal file
@@ -0,0 +1,106 @@
|
||||
# SBOM & License Compliance
|
||||
|
||||
The SBOM (Software Bill of Materials) feature provides a complete inventory of all dependencies across your repositories, with vulnerability tracking and license compliance analysis.
|
||||
|
||||
The SBOM page has three tabs: **Packages**, **License Compliance**, and **Compare**.
|
||||
|
||||
## Packages Tab
|
||||
|
||||
The packages tab lists all dependencies discovered during scans.
|
||||
|
||||
### Filtering
|
||||
|
||||
Use the filter bar to narrow results:
|
||||
|
||||
- **Repository** — Select a specific repository or view all
|
||||
- **Package Manager** — npm, cargo, pip, go, maven, nuget, composer, gem
|
||||
- **Search** — Filter by package name
|
||||
- **Vulnerabilities** — Show all packages, only those with vulnerabilities, or only clean packages
|
||||
- **License** — Filter by specific license (MIT, Apache-2.0, BSD-3-Clause, GPL-3.0, etc.)
|
||||
|
||||
### Package Details
|
||||
|
||||
Each package row shows:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| Package | Package name |
|
||||
| Version | Installed version |
|
||||
| Manager | Package manager (npm, cargo, pip, etc.) |
|
||||
| License | License identifier with color-coded badge |
|
||||
| Vulnerabilities | Count of known vulnerabilities (click to expand) |
|
||||
|
||||
### Vulnerability Details
|
||||
|
||||
Click the vulnerability count to expand inline details showing:
|
||||
|
||||
- Vulnerability ID (e.g. CVE-2024-1234)
|
||||
- Source database
|
||||
- Severity level
|
||||
- Link to the advisory
|
||||
|
||||
### Export
|
||||
|
||||
Export your SBOM in industry-standard formats:
|
||||
|
||||
1. Select a format:
|
||||
- **CycloneDX 1.5** — JSON format widely supported by security tools
|
||||
- **SPDX 2.3** — Linux Foundation standard for license compliance
|
||||
2. Click **Export**
|
||||
3. The SBOM downloads as a JSON file
|
||||
|
||||
::: tip
|
||||
SBOM exports are useful for compliance audits, customer security questionnaires, and supply chain transparency requirements.
|
||||
:::
|
||||
|
||||
## License Compliance Tab
|
||||
|
||||
The license compliance tab helps you understand your licensing obligations.
|
||||
|
||||
### Copyleft Warning
|
||||
|
||||
If any dependencies use copyleft licenses (GPL, AGPL, LGPL, MPL), a warning banner appears listing the affected packages and noting that they may impose distribution requirements.
|
||||
|
||||
### License Distribution
|
||||
|
||||
A horizontal bar chart visualizes the percentage breakdown of licenses across your dependencies.
|
||||
|
||||
### License Table
|
||||
|
||||
A detailed table lists every license found, with:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| License | License identifier |
|
||||
| Type | **Copyleft** or **Permissive** badge |
|
||||
| Packages | List of packages using this license |
|
||||
| Count | Number of packages |
|
||||
|
||||
**Copyleft licenses** (flagged as potentially restrictive):
|
||||
- GPL-2.0, GPL-3.0
|
||||
- AGPL-3.0
|
||||
- LGPL-2.1, LGPL-3.0
|
||||
- MPL-2.0
|
||||
|
||||
**Permissive licenses** (generally safe for commercial use):
|
||||
- MIT, Apache-2.0, BSD-2-Clause, BSD-3-Clause, ISC, etc.
|
||||
|
||||
## Compare Tab
|
||||
|
||||
Compare the dependency profiles of two repositories side by side.
|
||||
|
||||
1. Select **Repository A** from the first dropdown
|
||||
2. Select **Repository B** from the second dropdown
|
||||
3. View the diff results:
|
||||
|
||||
| Section | Description |
|
||||
|---------|-------------|
|
||||
| **Only in A** | Packages present in repo A but not in repo B |
|
||||
| **Only in B** | Packages present in repo B but not in repo A |
|
||||
| **Version Diffs** | Same package, different versions between repos |
|
||||
| **Common** | Count of packages that match exactly |
|
||||
|
||||
This is useful for:
|
||||
- Auditing consistency across microservices
|
||||
- Identifying dependency drift between environments
|
||||
- Planning dependency upgrades across projects
|
||||
153
docs/guide/configuration.md
Normal file
153
docs/guide/configuration.md
Normal file
@@ -0,0 +1,153 @@
|
||||
# Configuration
|
||||
|
||||
Compliance Scanner is configured through environment variables. Copy `.env.example` to `.env` and edit the values.
|
||||
|
||||
## Required Settings
|
||||
|
||||
### MongoDB
|
||||
|
||||
```bash
|
||||
MONGODB_URI=mongodb://root:example@localhost:27017/compliance_scanner?authSource=admin
|
||||
MONGODB_DATABASE=compliance_scanner
|
||||
```
|
||||
|
||||
### Agent
|
||||
|
||||
```bash
|
||||
AGENT_PORT=3001
|
||||
```
|
||||
|
||||
### Dashboard
|
||||
|
||||
```bash
|
||||
DASHBOARD_PORT=8080
|
||||
AGENT_API_URL=http://localhost:3001
|
||||
```
|
||||
|
||||
## LLM Configuration
|
||||
|
||||
The AI features (chat, remediation suggestions) use LiteLLM as a proxy to various LLM providers:
|
||||
|
||||
```bash
|
||||
LITELLM_URL=http://localhost:4000
|
||||
LITELLM_API_KEY=your-key
|
||||
LITELLM_MODEL=gpt-4o
|
||||
LITELLM_EMBED_MODEL=text-embedding-3-small
|
||||
```
|
||||
|
||||
The embed model is used for the RAG/AI Chat feature to generate code embeddings.
|
||||
|
||||
## Git Provider Tokens
|
||||
|
||||
### GitHub
|
||||
|
||||
```bash
|
||||
GITHUB_TOKEN=ghp_xxxx
|
||||
GITHUB_WEBHOOK_SECRET=your-webhook-secret
|
||||
```
|
||||
|
||||
### GitLab
|
||||
|
||||
```bash
|
||||
GITLAB_URL=https://gitlab.com
|
||||
GITLAB_TOKEN=glpat-xxxx
|
||||
GITLAB_WEBHOOK_SECRET=your-webhook-secret
|
||||
```
|
||||
|
||||
## Issue Tracker Integration
|
||||
|
||||
### Jira
|
||||
|
||||
```bash
|
||||
JIRA_URL=https://your-org.atlassian.net
|
||||
JIRA_EMAIL=user@example.com
|
||||
JIRA_API_TOKEN=your-api-token
|
||||
JIRA_PROJECT_KEY=SEC
|
||||
```
|
||||
|
||||
When configured, new findings automatically create Jira issues in the specified project.
|
||||
|
||||
## Scan Schedules
|
||||
|
||||
Cron expressions for automated scanning:
|
||||
|
||||
```bash
|
||||
# Scan every 6 hours
|
||||
SCAN_SCHEDULE=0 0 */6 * * *
|
||||
|
||||
# Check for new CVEs daily at midnight
|
||||
CVE_MONITOR_SCHEDULE=0 0 0 * * *
|
||||
```
|
||||
|
||||
## Search Engine
|
||||
|
||||
SearXNG is used for CVE enrichment and vulnerability research:
|
||||
|
||||
```bash
|
||||
SEARXNG_URL=http://localhost:8888
|
||||
```
|
||||
|
||||
## NVD API
|
||||
|
||||
An NVD API key increases rate limits for CVE lookups:
|
||||
|
||||
```bash
|
||||
NVD_API_KEY=your-nvd-api-key
|
||||
```
|
||||
|
||||
Get a free key at [https://nvd.nist.gov/developers/request-an-api-key](https://nvd.nist.gov/developers/request-an-api-key).
|
||||
|
||||
## MCP Server
|
||||
|
||||
The MCP server exposes compliance data to external LLMs via the Model Context Protocol. See [MCP Server](/features/mcp-server) for full details.
|
||||
|
||||
```bash
|
||||
# Set MCP_PORT to enable HTTP transport (omit for stdio mode)
|
||||
MCP_PORT=8090
|
||||
```
|
||||
|
||||
The MCP server shares the `MONGODB_URI` and `MONGODB_DATABASE` variables with the rest of the platform.
|
||||
|
||||
## Clone Path
|
||||
|
||||
Where the agent stores cloned repository files:
|
||||
|
||||
```bash
|
||||
GIT_CLONE_BASE_PATH=/tmp/compliance-scanner/repos
|
||||
```
|
||||
|
||||
## All Environment Variables
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
|----------|----------|---------|-------------|
|
||||
| `MONGODB_URI` | Yes | — | MongoDB connection string |
|
||||
| `MONGODB_DATABASE` | No | `compliance_scanner` | Database name |
|
||||
| `AGENT_PORT` | No | `3001` | Agent REST API port |
|
||||
| `DASHBOARD_PORT` | No | `8080` | Dashboard web UI port |
|
||||
| `AGENT_API_URL` | No | `http://localhost:3001` | Agent URL for dashboard |
|
||||
| `LITELLM_URL` | No | `http://localhost:4000` | LiteLLM proxy URL |
|
||||
| `LITELLM_API_KEY` | No | — | LiteLLM API key |
|
||||
| `LITELLM_MODEL` | No | `gpt-4o` | LLM model for analysis |
|
||||
| `LITELLM_EMBED_MODEL` | No | `text-embedding-3-small` | Embedding model for RAG |
|
||||
| `GITHUB_TOKEN` | No | — | GitHub personal access token |
|
||||
| `GITHUB_WEBHOOK_SECRET` | No | — | GitHub webhook signing secret |
|
||||
| `GITLAB_URL` | No | `https://gitlab.com` | GitLab instance URL |
|
||||
| `GITLAB_TOKEN` | No | — | GitLab access token |
|
||||
| `GITLAB_WEBHOOK_SECRET` | No | — | GitLab webhook signing secret |
|
||||
| `JIRA_URL` | No | — | Jira instance URL |
|
||||
| `JIRA_EMAIL` | No | — | Jira account email |
|
||||
| `JIRA_API_TOKEN` | No | — | Jira API token |
|
||||
| `JIRA_PROJECT_KEY` | No | — | Jira project key for issues |
|
||||
| `SEARXNG_URL` | No | `http://localhost:8888` | SearXNG instance URL |
|
||||
| `NVD_API_KEY` | No | — | NVD API key for CVE lookups |
|
||||
| `SCAN_SCHEDULE` | No | `0 0 */6 * * *` | Cron schedule for scans |
|
||||
| `CVE_MONITOR_SCHEDULE` | No | `0 0 0 * * *` | Cron schedule for CVE checks |
|
||||
| `GIT_CLONE_BASE_PATH` | No | `/tmp/compliance-scanner/repos` | Local clone directory |
|
||||
| `KEYCLOAK_URL` | No | — | Keycloak server URL |
|
||||
| `KEYCLOAK_REALM` | No | — | Keycloak realm name |
|
||||
| `KEYCLOAK_CLIENT_ID` | No | — | Keycloak client ID |
|
||||
| `REDIRECT_URI` | No | — | OAuth callback URL |
|
||||
| `APP_URL` | No | — | Application root URL |
|
||||
| `OTEL_EXPORTER_OTLP_ENDPOINT` | No | — | OTLP collector endpoint |
|
||||
| `OTEL_SERVICE_NAME` | No | — | OpenTelemetry service name |
|
||||
| `MCP_PORT` | No | — | MCP HTTP transport port (omit for stdio) |
|
||||
75
docs/guide/findings.md
Normal file
75
docs/guide/findings.md
Normal file
@@ -0,0 +1,75 @@
|
||||
# Managing Findings
|
||||
|
||||
Findings are security issues discovered during scans. The findings workflow lets you triage, track, and resolve vulnerabilities across all your repositories.
|
||||
|
||||
## Findings List
|
||||
|
||||
Navigate to **Findings** in the sidebar to see all findings. The table shows:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| Severity | Color-coded badge: Critical (red), High (orange), Medium (yellow), Low (green) |
|
||||
| Title | Short description of the vulnerability (clickable) |
|
||||
| Type | SAST, SBOM, CVE, GDPR, or OAuth |
|
||||
| Scanner | Tool that found the issue (e.g. semgrep, syft) |
|
||||
| File | Source file path where the issue was found |
|
||||
| Status | Current triage status |
|
||||
|
||||
## Filtering
|
||||
|
||||
Use the filter bar at the top to narrow results:
|
||||
|
||||
- **Repository** — Filter to a specific repository or view all
|
||||
- **Severity** — Critical, High, Medium, Low, or Info
|
||||
- **Type** — SAST, SBOM, CVE, GDPR, OAuth
|
||||
- **Status** — Open, Triaged, Resolved, False Positive, Ignored
|
||||
|
||||
Filters can be combined. Results are paginated with 20 findings per page.
|
||||
|
||||
## Finding Detail
|
||||
|
||||
Click any finding title to view its full detail page, which includes:
|
||||
|
||||
### Metadata
|
||||
- Severity level with CWE identifier and CVSS score (when available)
|
||||
- Scanner tool and scan type
|
||||
- File path and line number
|
||||
|
||||
### Description
|
||||
Full explanation of the vulnerability, why it's a risk, and what conditions trigger it.
|
||||
|
||||
### Code Evidence
|
||||
The source code snippet where the issue was found, with syntax highlighting and the file path.
|
||||
|
||||
### Remediation
|
||||
Step-by-step guidance on how to fix the vulnerability.
|
||||
|
||||
### Suggested Fix
|
||||
A code example showing the corrected implementation.
|
||||
|
||||
### Linked Issue
|
||||
If the finding was pushed to an issue tracker (GitHub, GitLab, Jira), a direct link to the external issue.
|
||||
|
||||
## Updating Status
|
||||
|
||||
On the finding detail page, change the finding's status using the status buttons:
|
||||
|
||||
| Status | When to Use |
|
||||
|--------|-------------|
|
||||
| **Open** | New finding, not yet reviewed |
|
||||
| **Triaged** | Reviewed and confirmed as a real issue, pending fix |
|
||||
| **Resolved** | Fix has been applied |
|
||||
| **False Positive** | Finding is not a real vulnerability in this context |
|
||||
| **Ignored** | Known issue that won't be fixed (accepted risk) |
|
||||
|
||||
Status changes are persisted immediately.
|
||||
|
||||
## Severity Levels
|
||||
|
||||
| Severity | Description | Typical Examples |
|
||||
|----------|-------------|-----------------|
|
||||
| **Critical** | Immediate exploitation risk, data breach likely | SQL injection, RCE, hardcoded secrets |
|
||||
| **High** | Serious vulnerability, exploitation probable | XSS, authentication bypass, SSRF |
|
||||
| **Medium** | Moderate risk, exploitation requires specific conditions | Insecure deserialization, weak crypto |
|
||||
| **Low** | Minor risk, limited impact | Information disclosure, verbose errors |
|
||||
| **Info** | Informational, no direct security impact | Best practice recommendations |
|
||||
55
docs/guide/getting-started.md
Normal file
55
docs/guide/getting-started.md
Normal file
@@ -0,0 +1,55 @@
|
||||
# Getting Started
|
||||
|
||||
Compliance Scanner is a security compliance platform that scans your Git repositories for vulnerabilities, builds software bills of materials, performs dynamic application testing, and provides AI-powered code intelligence.
|
||||
|
||||
## Architecture
|
||||
|
||||
The platform consists of three main components:
|
||||
|
||||
- **Agent** — Background service that clones repositories, runs scans, builds graphs, and exposes a REST API
|
||||
- **Dashboard** — Web UI built with Dioxus (Rust full-stack framework) for viewing results and managing repositories
|
||||
- **MongoDB** — Database for storing all scan results, findings, SBOM data, and graph structures
|
||||
|
||||
## Quick Start with Docker Compose
|
||||
|
||||
The fastest way to get running:
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repo-url> compliance-scanner
|
||||
cd compliance-scanner
|
||||
|
||||
# Copy and configure environment variables
|
||||
cp .env.example .env
|
||||
# Edit .env with your settings (see Configuration)
|
||||
|
||||
# Start all services
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
This starts:
|
||||
- MongoDB on port `27017`
|
||||
- Agent API on port `3001`
|
||||
- Dashboard on port `8080`
|
||||
- Chromium (for DAST crawling) on port `3003`
|
||||
|
||||
Open the dashboard at [http://localhost:8080](http://localhost:8080).
|
||||
|
||||
## What Happens During a Scan
|
||||
|
||||
When you add a repository and trigger a scan, the agent runs through these phases:
|
||||
|
||||
1. **Clone** — Clones or pulls the latest code from the Git remote
|
||||
2. **SAST** — Runs static analysis using Semgrep with rules for OWASP, GDPR, OAuth, and general security
|
||||
3. **SBOM** — Extracts all dependencies using Syft, identifying packages, versions, licenses, and known vulnerabilities
|
||||
4. **CVE Check** — Cross-references dependencies against the NVD database for known CVEs
|
||||
5. **Graph Build** — Parses the codebase to construct a code knowledge graph of functions, classes, and their relationships
|
||||
6. **Issue Sync** — Creates or updates issues in connected trackers (GitHub, GitLab, Jira) for new findings
|
||||
|
||||
Each phase produces results visible in the dashboard immediately.
|
||||
|
||||
## Next Steps
|
||||
|
||||
- [Add your first repository](/guide/repositories)
|
||||
- [Understand scan results](/guide/findings)
|
||||
- [Configure integrations](/guide/configuration)
|
||||
62
docs/guide/repositories.md
Normal file
62
docs/guide/repositories.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# Adding Repositories
|
||||
|
||||
Repositories are the core resource in Compliance Scanner. Each tracked repository is scanned on a schedule and its results are available across all features.
|
||||
|
||||
## Adding a Repository
|
||||
|
||||
1. Navigate to **Repositories** in the sidebar
|
||||
2. Click **Add Repository** at the top of the page
|
||||
3. Fill in the form:
|
||||
- **Name** — A display name for the repository
|
||||
- **Git URL** — The clone URL (HTTPS or SSH), e.g. `https://github.com/org/repo.git`
|
||||
- **Default Branch** — The branch to scan, e.g. `main` or `master`
|
||||
4. Click **Add**
|
||||
|
||||
The repository appears in the list immediately. It will not be scanned until you trigger a scan manually or the next scheduled scan runs.
|
||||
|
||||
::: tip
|
||||
For private repositories, configure a GitHub token (`GITHUB_TOKEN`) or GitLab token (`GITLAB_TOKEN`) in your environment. The agent uses these tokens when cloning.
|
||||
:::
|
||||
|
||||
## Repository List
|
||||
|
||||
The repositories page shows all tracked repositories with:
|
||||
|
||||
| Column | Description |
|
||||
|--------|-------------|
|
||||
| Name | Repository display name |
|
||||
| Git URL | Clone URL |
|
||||
| Branch | Default branch being scanned |
|
||||
| Findings | Total number of security findings |
|
||||
| Last Scanned | Relative timestamp of the most recent scan |
|
||||
|
||||
## Triggering a Scan
|
||||
|
||||
Click the **Scan** button on any repository row to trigger an immediate scan. The scan runs in the background through all phases (clone, SAST, SBOM, CVE check, graph build, issue sync). You can monitor progress on the Overview page under recent scan runs.
|
||||
|
||||
## Deleting a Repository
|
||||
|
||||
Click the **Delete** button on a repository row. A confirmation dialog appears warning that this action permanently removes:
|
||||
|
||||
- All security findings
|
||||
- SBOM entries and vulnerability data
|
||||
- Scan run history
|
||||
- Code graph data
|
||||
- Embedding vectors (for AI chat)
|
||||
- CVE alerts
|
||||
|
||||
This action cannot be undone.
|
||||
|
||||
## Automatic Scanning
|
||||
|
||||
Repositories are scanned automatically on a schedule configured by the `SCAN_SCHEDULE` environment variable (cron format). The default is every 6 hours:
|
||||
|
||||
```
|
||||
SCAN_SCHEDULE=0 0 */6 * * *
|
||||
```
|
||||
|
||||
CVE monitoring runs on a separate schedule (default: daily at midnight):
|
||||
|
||||
```
|
||||
CVE_MONITOR_SCHEDULE=0 0 0 * * *
|
||||
```
|
||||
83
docs/guide/scanning.md
Normal file
83
docs/guide/scanning.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Running Scans
|
||||
|
||||
Scans are the primary workflow in Compliance Scanner. Each scan analyzes a repository for security vulnerabilities, dependency risks, and code structure.
|
||||
|
||||
## Scan Types
|
||||
|
||||
A full scan consists of multiple phases, each producing different types of findings:
|
||||
|
||||
| Scan Type | What It Detects | Scanner |
|
||||
|-----------|----------------|---------|
|
||||
| **SAST** | Code-level vulnerabilities (injection, XSS, insecure crypto, etc.) | Semgrep |
|
||||
| **SBOM** | Dependency inventory, outdated packages, known vulnerabilities | Syft |
|
||||
| **CVE** | Known CVEs in dependencies cross-referenced against NVD | NVD API |
|
||||
| **GDPR** | Personal data handling issues, consent violations | Custom rules |
|
||||
| **OAuth** | OAuth/OIDC misconfigurations, insecure token handling | Custom rules |
|
||||
|
||||
## Triggering a Scan
|
||||
|
||||
### Manual Scan
|
||||
|
||||
1. Go to **Repositories**
|
||||
2. Click **Scan** on the repository you want to scan
|
||||
3. The scan starts immediately in the background
|
||||
|
||||
### Scheduled Scans
|
||||
|
||||
Scans run automatically based on the `SCAN_SCHEDULE` cron expression. By default, a scan runs every 6 hours:
|
||||
|
||||
```
|
||||
SCAN_SCHEDULE=0 0 */6 * * *
|
||||
```
|
||||
|
||||
### Webhook-Triggered Scans
|
||||
|
||||
Configure GitHub or GitLab webhooks to trigger scans on push events. Set the webhook URL to:
|
||||
|
||||
```
|
||||
http://<agent-host>:3002/webhook/github
|
||||
http://<agent-host>:3002/webhook/gitlab
|
||||
```
|
||||
|
||||
And configure the corresponding webhook secret:
|
||||
|
||||
```
|
||||
GITHUB_WEBHOOK_SECRET=your-secret
|
||||
GITLAB_WEBHOOK_SECRET=your-secret
|
||||
```
|
||||
|
||||
## Scan Phases
|
||||
|
||||
Each scan progresses through these phases in order:
|
||||
|
||||
1. **Queued** — Scan is waiting to start
|
||||
2. **Cloning** — Repository is being cloned or updated
|
||||
3. **Scanning** — Static analysis and SBOM extraction are running
|
||||
4. **Analyzing** — CVE cross-referencing and graph construction
|
||||
5. **Reporting** — Creating tracker issues for new findings
|
||||
6. **Completed** — All phases finished successfully
|
||||
|
||||
If any phase fails, the scan status is set to **Failed** with an error message.
|
||||
|
||||
## Viewing Scan History
|
||||
|
||||
The Overview page shows the 10 most recent scan runs across all repositories, including:
|
||||
|
||||
- Repository name
|
||||
- Scan status
|
||||
- Current phase
|
||||
- Number of findings discovered
|
||||
- Start time and duration
|
||||
|
||||
## Scan Run Statuses
|
||||
|
||||
| Status | Meaning |
|
||||
|--------|---------|
|
||||
| `queued` | Waiting to start |
|
||||
| `running` | Currently executing |
|
||||
| `completed` | Finished successfully |
|
||||
| `failed` | Stopped due to an error |
|
||||
|
||||
## Deduplication
|
||||
|
||||
Findings are deduplicated using a fingerprint hash based on the scanner, file path, line number, and vulnerability type. Repeated scans will not create duplicate findings for the same issue.
|
||||
29
docs/index.md
Normal file
29
docs/index.md
Normal file
@@ -0,0 +1,29 @@
|
||||
---
|
||||
layout: home
|
||||
|
||||
hero:
|
||||
name: Compliance Scanner
|
||||
text: AI-Powered Security Compliance
|
||||
tagline: Automated SAST, SBOM, DAST, CVE monitoring, and code intelligence for your repositories
|
||||
actions:
|
||||
- theme: brand
|
||||
text: Get Started
|
||||
link: /guide/getting-started
|
||||
- theme: alt
|
||||
text: Features
|
||||
link: /features/overview
|
||||
|
||||
features:
|
||||
- title: Static Analysis (SAST)
|
||||
details: Automated security scanning with Semgrep, detecting vulnerabilities across multiple languages including OWASP patterns, GDPR issues, and OAuth misconfigurations.
|
||||
- title: SBOM & License Compliance
|
||||
details: Full software bill of materials with dependency inventory, vulnerability tracking, license compliance analysis, and export to CycloneDX/SPDX formats.
|
||||
- title: Dynamic Testing (DAST)
|
||||
details: Black-box security testing of live web applications and APIs. Crawls endpoints, fuzzes parameters, and detects SQL injection, XSS, SSRF, and auth bypass vulnerabilities.
|
||||
- title: Code Knowledge Graph
|
||||
details: Interactive visualization of your codebase structure. Understand function calls, class hierarchies, and module dependencies with community detection.
|
||||
- title: Impact Analysis
|
||||
details: When a vulnerability is found, see exactly which entry points and call chains are affected. Understand blast radius before prioritizing fixes.
|
||||
- title: AI-Powered Chat
|
||||
details: Ask questions about your codebase using RAG-powered AI. Code is embedded as vectors and retrieved contextually to give accurate, source-referenced answers.
|
||||
---
|
||||
15
docs/nginx.conf
Normal file
15
docs/nginx.conf
Normal file
@@ -0,0 +1,15 @@
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
location / {
|
||||
try_files $uri $uri/ $uri.html /index.html;
|
||||
}
|
||||
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
}
|
||||
2514
docs/package-lock.json
generated
Normal file
2514
docs/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
12
docs/package.json
Normal file
12
docs/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "compliance-scanner-docs",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "vitepress dev",
|
||||
"build": "vitepress build",
|
||||
"preview": "vitepress preview"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vitepress": "^1.6.4"
|
||||
}
|
||||
}
|
||||
52
otel-collector-config.yaml
Normal file
52
otel-collector-config.yaml
Normal file
@@ -0,0 +1,52 @@
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
grpc:
|
||||
endpoint: 0.0.0.0:4317
|
||||
http:
|
||||
endpoint: 0.0.0.0:4318
|
||||
|
||||
processors:
|
||||
batch:
|
||||
timeout: 5s
|
||||
send_batch_size: 1024
|
||||
|
||||
exporters:
|
||||
# Log to stdout for debugging
|
||||
debug:
|
||||
verbosity: basic
|
||||
|
||||
# Configure your backend below. Examples:
|
||||
#
|
||||
# SigNoz:
|
||||
# otlp/signoz:
|
||||
# endpoint: "signoz-otel-collector:4317"
|
||||
# tls:
|
||||
# insecure: true
|
||||
#
|
||||
# Grafana Tempo (traces):
|
||||
# otlp/tempo:
|
||||
# endpoint: "tempo:4317"
|
||||
# tls:
|
||||
# insecure: true
|
||||
#
|
||||
# Grafana Loki (logs):
|
||||
# loki:
|
||||
# endpoint: "http://loki:3100/loki/api/v1/push"
|
||||
#
|
||||
# Jaeger:
|
||||
# otlp/jaeger:
|
||||
# endpoint: "jaeger:4317"
|
||||
# tls:
|
||||
# insecure: true
|
||||
|
||||
service:
|
||||
pipelines:
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [debug]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [debug]
|
||||
Reference in New Issue
Block a user