Compare commits
2 commits: main ... feat/help-

| Author | SHA1 | Date |
|---|---|---|
|  | 4d7efea683 |  |
|  | 263a4e654a |  |
@@ -70,7 +70,7 @@ jobs:

# Tests (reuses compilation artifacts from clippy)
- name: Tests (core + agent)
run: cargo test -p compliance-core -p compliance-agent --lib
run: cargo test -p compliance-core -p compliance-agent
- name: Tests (dashboard server)
run: cargo test -p compliance-dashboard --features server --no-default-features
- name: Tests (dashboard web)
@@ -145,20 +145,13 @@ jobs:
needs: [detect-changes]
if: needs.detect-changes.outputs.agent == 'true'
container:
image: docker:27-cli
image: alpine:latest
steps:
- name: Build, push and trigger orca redeploy
- name: Trigger Coolify deploy
run: |
apk add --no-cache git curl openssl
git init && git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
git fetch --depth=1 origin "${GITHUB_SHA}" && git checkout FETCH_HEAD
IMAGE=registry.meghsakha.com/compliance-agent
echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login registry.meghsakha.com -u "${{ secrets.REGISTRY_USERNAME }}" --password-stdin
docker build -f Dockerfile.agent -t "$IMAGE:latest" -t "$IMAGE:${GITHUB_SHA}" .
docker push "$IMAGE:latest" && docker push "$IMAGE:${GITHUB_SHA}"
PAYLOAD=$(printf '{"ref":"refs/heads/main","repository":{"full_name":"sharang/compliance-scanner-agent"},"head_commit":{"id":"%s","message":"deploy agent"}}' "${GITHUB_SHA}")
SIG=$(printf '%s' "$PAYLOAD" | openssl dgst -sha256 -hmac "${{ secrets.ORCA_WEBHOOK_SECRET }}" | awk '{print $2}')
RESP=$(curl -fsS -w "\nHTTP %{http_code}" -X POST "http://46.225.100.82:6880/api/v1/webhooks/github" -H "Content-Type: application/json" -H "X-Hub-Signature-256: sha256=$SIG" -d "$PAYLOAD"); echo "$RESP"
apk add --no-cache curl
curl -sf "${{ secrets.COOLIFY_WEBHOOK_AGENT }}" \
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
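The superseded deploy step above signs its webhook payload with HMAC-SHA256 via openssl and sends the digest in the X-Hub-Signature-256 header, mimicking GitHub's webhook signing. For reference, a minimal sketch of how a receiver might verify such a signature, written in Rust and assuming the hmac, sha2, and hex crates (none of which appear in this change set):

```rust
// Sketch only: verify an "X-Hub-Signature-256: sha256=<hex>" header against the
// raw request body, the same scheme the deploy script produces with openssl.
// Crates assumed (not part of this diff): hmac = "0.12", sha2 = "0.10", hex = "0.4".
use hmac::{Hmac, Mac};
use sha2::Sha256;

type HmacSha256 = Hmac<Sha256>;

pub fn verify_webhook_signature(secret: &[u8], body: &[u8], header: &str) -> bool {
    let Some(hex_sig) = header.strip_prefix("sha256=") else {
        return false;
    };
    let Ok(expected) = hex::decode(hex_sig) else {
        return false;
    };
    let mut mac = HmacSha256::new_from_slice(secret).expect("HMAC accepts keys of any length");
    mac.update(body);
    // Constant-time comparison; Err on mismatch.
    mac.verify_slice(&expected).is_ok()
}
```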
deploy-dashboard:
|
||||
name: Deploy Dashboard
|
||||
@@ -166,20 +159,13 @@ jobs:
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.dashboard == 'true'
|
||||
container:
|
||||
image: docker:27-cli
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Build, push and trigger orca redeploy
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache git curl openssl
|
||||
git init && git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
|
||||
git fetch --depth=1 origin "${GITHUB_SHA}" && git checkout FETCH_HEAD
|
||||
IMAGE=registry.meghsakha.com/compliance-dashboard
|
||||
echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login registry.meghsakha.com -u "${{ secrets.REGISTRY_USERNAME }}" --password-stdin
|
||||
docker build -f Dockerfile.dashboard -t "$IMAGE:latest" -t "$IMAGE:${GITHUB_SHA}" .
|
||||
docker push "$IMAGE:latest" && docker push "$IMAGE:${GITHUB_SHA}"
|
||||
PAYLOAD=$(printf '{"ref":"refs/heads/main","repository":{"full_name":"sharang/compliance-scanner-agent"},"head_commit":{"id":"%s","message":"deploy dashboard"}}' "${GITHUB_SHA}")
|
||||
SIG=$(printf '%s' "$PAYLOAD" | openssl dgst -sha256 -hmac "${{ secrets.ORCA_WEBHOOK_SECRET }}" | awk '{print $2}')
|
||||
RESP=$(curl -fsS -w "\nHTTP %{http_code}" -X POST "http://46.225.100.82:6880/api/v1/webhooks/github" -H "Content-Type: application/json" -H "X-Hub-Signature-256: sha256=$SIG" -d "$PAYLOAD"); echo "$RESP"
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_DASHBOARD }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
deploy-docs:
|
||||
name: Deploy Docs
|
||||
@@ -187,20 +173,13 @@ jobs:
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.docs == 'true'
|
||||
container:
|
||||
image: docker:27-cli
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Build, push and trigger orca redeploy
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache git curl openssl
|
||||
git init && git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
|
||||
git fetch --depth=1 origin "${GITHUB_SHA}" && git checkout FETCH_HEAD
|
||||
IMAGE=registry.meghsakha.com/compliance-docs
|
||||
echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login registry.meghsakha.com -u "${{ secrets.REGISTRY_USERNAME }}" --password-stdin
|
||||
docker build -f Dockerfile.docs -t "$IMAGE:latest" -t "$IMAGE:${GITHUB_SHA}" .
|
||||
docker push "$IMAGE:latest" && docker push "$IMAGE:${GITHUB_SHA}"
|
||||
PAYLOAD=$(printf '{"ref":"refs/heads/main","repository":{"full_name":"sharang/compliance-scanner-agent"},"head_commit":{"id":"%s","message":"deploy docs"}}' "${GITHUB_SHA}")
|
||||
SIG=$(printf '%s' "$PAYLOAD" | openssl dgst -sha256 -hmac "${{ secrets.ORCA_WEBHOOK_SECRET }}" | awk '{print $2}')
|
||||
RESP=$(curl -fsS -w "\nHTTP %{http_code}" -X POST "http://46.225.100.82:6880/api/v1/webhooks/github" -H "Content-Type: application/json" -H "X-Hub-Signature-256: sha256=$SIG" -d "$PAYLOAD"); echo "$RESP"
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_DOCS }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
deploy-mcp:
|
||||
name: Deploy MCP
|
||||
@@ -208,17 +187,10 @@ jobs:
|
||||
needs: [detect-changes]
|
||||
if: needs.detect-changes.outputs.mcp == 'true'
|
||||
container:
|
||||
image: docker:27-cli
|
||||
image: alpine:latest
|
||||
steps:
|
||||
- name: Build, push and trigger orca redeploy
|
||||
- name: Trigger Coolify deploy
|
||||
run: |
|
||||
apk add --no-cache git curl openssl
|
||||
git init && git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
|
||||
git fetch --depth=1 origin "${GITHUB_SHA}" && git checkout FETCH_HEAD
|
||||
IMAGE=registry.meghsakha.com/compliance-mcp
|
||||
echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login registry.meghsakha.com -u "${{ secrets.REGISTRY_USERNAME }}" --password-stdin
|
||||
docker build -f Dockerfile.mcp -t "$IMAGE:latest" -t "$IMAGE:${GITHUB_SHA}" .
|
||||
docker push "$IMAGE:latest" && docker push "$IMAGE:${GITHUB_SHA}"
|
||||
PAYLOAD=$(printf '{"ref":"refs/heads/main","repository":{"full_name":"sharang/compliance-scanner-agent"},"head_commit":{"id":"%s","message":"deploy mcp"}}' "${GITHUB_SHA}")
|
||||
SIG=$(printf '%s' "$PAYLOAD" | openssl dgst -sha256 -hmac "${{ secrets.ORCA_WEBHOOK_SECRET }}" | awk '{print $2}')
|
||||
RESP=$(curl -fsS -w "\nHTTP %{http_code}" -X POST "http://46.225.100.82:6880/api/v1/webhooks/github" -H "Content-Type: application/json" -H "X-Hub-Signature-256: sha256=$SIG" -d "$PAYLOAD"); echo "$RESP"
|
||||
apk add --no-cache curl
|
||||
curl -sf "${{ secrets.COOLIFY_WEBHOOK_MCP }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.COOLIFY_TOKEN }}"
|
||||
|
||||
@@ -1,52 +0,0 @@
name: Nightly E2E Tests

on:
schedule:
- cron: '0 3 * * *' # 3 AM UTC daily
workflow_dispatch: # Allow manual trigger

env:
CARGO_TERM_COLOR: always
RUSTFLAGS: "-D warnings"
RUSTC_WRAPPER: /usr/local/bin/sccache
SCCACHE_DIR: /tmp/sccache
TEST_MONGODB_URI: "mongodb://root:example@mongo:27017/?authSource=admin"

concurrency:
group: nightly-e2e
cancel-in-progress: true

jobs:
e2e:
name: E2E Tests
runs-on: docker
container:
image: rust:1.94-bookworm
services:
mongo:
image: mongo:7
env:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: example
steps:
- name: Checkout
run: |
git init
git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
git fetch --depth=1 origin "${GITHUB_SHA:-refs/heads/main}"
git checkout FETCH_HEAD

- name: Install sccache
run: |
curl -fsSL https://github.com/mozilla/sccache/releases/download/v0.9.1/sccache-v0.9.1-x86_64-unknown-linux-musl.tar.gz \
| tar xz --strip-components=1 -C /usr/local/bin/ sccache-v0.9.1-x86_64-unknown-linux-musl/sccache
chmod +x /usr/local/bin/sccache
env:
RUSTC_WRAPPER: ""

- name: Run E2E tests
run: cargo test -p compliance-agent --test e2e -- --test-threads=4

- name: Show sccache stats
run: sccache --show-stats
if: always()
@@ -33,15 +33,9 @@ RUN pip3 install --break-system-packages ruff

COPY --from=builder /app/target/release/compliance-agent /usr/local/bin/compliance-agent

# Copy documentation for the help chat assistant
COPY --from=builder /app/README.md /app/README.md
COPY --from=builder /app/docs /app/docs
ENV HELP_DOCS_PATH=/app

# Ensure SSH key directory exists
RUN mkdir -p /data/compliance-scanner/ssh

EXPOSE 3001 3002

ENTRYPOINT ["compliance-agent"]

@@ -1,6 +1,6 @@
FROM rust:1.94-bookworm AS builder

RUN cargo install dioxus-cli --version 0.7.3 --locked
RUN cargo install dioxus-cli --version 0.7.3

ARG DOCS_URL=/docs

@@ -20,4 +20,3 @@ ENV IP=0.0.0.0
EXPOSE 8080

ENTRYPOINT ["./compliance-dashboard"]

@@ -12,4 +12,3 @@ RUN rm /etc/nginx/conf.d/default.conf
COPY docs/nginx.conf /etc/nginx/conf.d/default.conf
COPY --from=builder /app/.vitepress/dist /usr/share/nginx/html
EXPOSE 80

@@ -14,4 +14,3 @@ EXPOSE 8090
ENV MCP_PORT=8090

ENTRYPOINT ["compliance-mcp"]
@@ -25,7 +25,7 @@ uuid = { workspace = true }
secrecy = { workspace = true }
regex = { workspace = true }
axum = "0.8"
tower-http = { version = "0.6", features = ["cors", "trace", "set-header"] }
tower-http = { version = "0.6", features = ["cors", "trace"] }
git2 = "0.20"
octocrab = "0.44"
tokio-cron-scheduler = "0.13"

@@ -42,14 +42,3 @@ tokio-tungstenite = { version = "0.26", features = ["rustls-tls-webpki-roots"] }
futures-core = "0.3"
dashmap = { workspace = true }
tokio-stream = { workspace = true }

[dev-dependencies]
compliance-core = { workspace = true, features = ["mongodb"] }
reqwest = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
mongodb = { workspace = true }
uuid = { workspace = true }
secrecy = { workspace = true }
axum = "0.8"
tower-http = { version = "0.6", features = ["cors"] }
@@ -104,58 +104,28 @@ fn load_docs(root: &Path) -> String {

/// Returns a reference to the cached doc context string, initialised on
/// first call via `OnceLock`.
///
/// Discovery order:
/// 1. `HELP_DOCS_PATH` env var (explicit override)
/// 2. Walk up from the binary location
/// 3. Current working directory
/// 4. Common Docker paths (/app, /opt/compliance-scanner)
fn doc_context() -> &'static str {
DOC_CONTEXT.get_or_init(|| {
// 1. Explicit env var
if let Ok(path) = std::env::var("HELP_DOCS_PATH") {
let p = PathBuf::from(&path);
if p.join("README.md").is_file() || p.join("docs").is_dir() {
tracing::info!("help_chat: loading docs from HELP_DOCS_PATH={path}");
return load_docs(&p);
}
tracing::warn!("help_chat: HELP_DOCS_PATH={path} has no README.md or docs/");
}

// 2. Walk up from binary location
let start = std::env::current_exe()
.ok()
.and_then(|p| p.parent().map(Path::to_path_buf))
.unwrap_or_else(|| PathBuf::from("."));

if let Some(root) = find_project_root(&start) {
return load_docs(&root);
}

// 3. Current working directory
if let Ok(cwd) = std::env::current_dir() {
if let Some(root) = find_project_root(&cwd) {
return load_docs(&root);
}
if cwd.join("README.md").is_file() {
return load_docs(&cwd);
match find_project_root(&start) {
Some(root) => load_docs(&root),
None => {
// Fallback: try current working directory
let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
if cwd.join("README.md").is_file() {
return load_docs(&cwd);
}
tracing::error!(
"help_chat: could not locate project root from {}; doc context will be empty",
start.display()
);
String::new()
}
}

// 4. Common Docker/deployment paths
for candidate in ["/app", "/opt/compliance-scanner", "/srv/compliance-scanner"] {
let p = PathBuf::from(candidate);
if p.join("README.md").is_file() || p.join("docs").is_dir() {
tracing::info!("help_chat: found docs at {candidate}");
return load_docs(&p);
}
}

tracing::error!(
"help_chat: could not locate project root; doc context will be empty. \
Set HELP_DOCS_PATH to the directory containing README.md and docs/"
);
String::new()
})
}
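Both the old and new bodies of doc_context lean on a find_project_root helper that this hunk does not show. A hypothetical sketch of what that helper plausibly does, assuming it simply walks upward from the start directory until it finds the same markers checked above (README.md or docs/):

```rust
// Hypothetical reconstruction, not taken from the diff: walk up the directory
// tree from `start` until a directory containing README.md or docs/ is found.
use std::path::{Path, PathBuf};

fn find_project_root(start: &Path) -> Option<PathBuf> {
    let mut dir = Some(start.to_path_buf());
    while let Some(d) = dir {
        if d.join("README.md").is_file() || d.join("docs").is_dir() {
            return Some(d);
        }
        dir = d.parent().map(Path::to_path_buf);
    }
    None
}
```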
@@ -6,7 +6,6 @@ pub mod graph;
pub mod health;
pub mod help_chat;
pub mod issues;
pub mod notifications;
pub mod pentest_handlers;
pub use pentest_handlers as pentest;
pub mod repos;

@@ -1,178 +0,0 @@
|
||||
use axum::extract::Extension;
|
||||
use axum::http::StatusCode;
|
||||
use axum::Json;
|
||||
use mongodb::bson::doc;
|
||||
use serde::Deserialize;
|
||||
|
||||
use compliance_core::models::notification::CveNotification;
|
||||
|
||||
use super::dto::{AgentExt, ApiResponse};
|
||||
|
||||
/// GET /api/v1/notifications — List CVE notifications (newest first)
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub async fn list_notifications(
|
||||
Extension(agent): AgentExt,
|
||||
axum::extract::Query(params): axum::extract::Query<NotificationFilter>,
|
||||
) -> Result<Json<ApiResponse<Vec<CveNotification>>>, StatusCode> {
|
||||
let mut filter = doc! {};
|
||||
|
||||
// Filter by status (default: show new + read, exclude dismissed)
|
||||
match params.status.as_deref() {
|
||||
Some("all") => {}
|
||||
Some(s) => {
|
||||
filter.insert("status", s);
|
||||
}
|
||||
None => {
|
||||
filter.insert("status", doc! { "$in": ["new", "read"] });
|
||||
}
|
||||
}
|
||||
|
||||
// Filter by severity
|
||||
if let Some(ref sev) = params.severity {
|
||||
filter.insert("severity", sev.as_str());
|
||||
}
|
||||
|
||||
// Filter by repo
|
||||
if let Some(ref repo_id) = params.repo_id {
|
||||
filter.insert("repo_id", repo_id.as_str());
|
||||
}
|
||||
|
||||
let page = params.page.unwrap_or(1).max(1);
|
||||
let limit = params.limit.unwrap_or(50).min(200);
|
||||
let skip = (page - 1) * limit as u64;
|
||||
|
||||
let total = agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.count_documents(filter.clone())
|
||||
.await
|
||||
.unwrap_or(0);
|
||||
|
||||
let notifications: Vec<CveNotification> = match agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.find(filter)
|
||||
.sort(doc! { "created_at": -1 })
|
||||
.skip(skip)
|
||||
.limit(limit)
|
||||
.await
|
||||
{
|
||||
Ok(cursor) => {
|
||||
use futures_util::StreamExt;
|
||||
let mut items = Vec::new();
|
||||
let mut cursor = cursor;
|
||||
while let Some(Ok(n)) = cursor.next().await {
|
||||
items.push(n);
|
||||
}
|
||||
items
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to list notifications: {e}");
|
||||
return Err(StatusCode::INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: notifications,
|
||||
total: Some(total),
|
||||
page: Some(page),
|
||||
}))
|
||||
}
|
||||
|
||||
/// GET /api/v1/notifications/count — Count of unread notifications
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub async fn notification_count(
|
||||
Extension(agent): AgentExt,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let count = agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.count_documents(doc! { "status": "new" })
|
||||
.await
|
||||
.unwrap_or(0);
|
||||
|
||||
Ok(Json(serde_json::json!({ "count": count })))
|
||||
}
|
||||
|
||||
/// PATCH /api/v1/notifications/:id/read — Mark a notification as read
|
||||
#[tracing::instrument(skip_all, fields(id = %id))]
|
||||
pub async fn mark_read(
|
||||
Extension(agent): AgentExt,
|
||||
axum::extract::Path(id): axum::extract::Path<String>,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
||||
|
||||
let result = agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.update_one(
|
||||
doc! { "_id": oid },
|
||||
doc! { "$set": {
|
||||
"status": "read",
|
||||
"read_at": mongodb::bson::DateTime::now(),
|
||||
}},
|
||||
)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
if result.matched_count == 0 {
|
||||
return Err(StatusCode::NOT_FOUND);
|
||||
}
|
||||
Ok(Json(serde_json::json!({ "status": "read" })))
|
||||
}
|
||||
|
||||
/// PATCH /api/v1/notifications/:id/dismiss — Dismiss a notification
|
||||
#[tracing::instrument(skip_all, fields(id = %id))]
|
||||
pub async fn dismiss_notification(
|
||||
Extension(agent): AgentExt,
|
||||
axum::extract::Path(id): axum::extract::Path<String>,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
||||
|
||||
let result = agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.update_one(
|
||||
doc! { "_id": oid },
|
||||
doc! { "$set": { "status": "dismissed" } },
|
||||
)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
if result.matched_count == 0 {
|
||||
return Err(StatusCode::NOT_FOUND);
|
||||
}
|
||||
Ok(Json(serde_json::json!({ "status": "dismissed" })))
|
||||
}
|
||||
|
||||
/// POST /api/v1/notifications/read-all — Mark all new notifications as read
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub async fn mark_all_read(
|
||||
Extension(agent): AgentExt,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let result = agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.update_many(
|
||||
doc! { "status": "new" },
|
||||
doc! { "$set": {
|
||||
"status": "read",
|
||||
"read_at": mongodb::bson::DateTime::now(),
|
||||
}},
|
||||
)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
Ok(Json(
|
||||
serde_json::json!({ "updated": result.modified_count }),
|
||||
))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct NotificationFilter {
|
||||
pub status: Option<String>,
|
||||
pub severity: Option<String>,
|
||||
pub repo_id: Option<String>,
|
||||
pub page: Option<u64>,
|
||||
pub limit: Option<i64>,
|
||||
}
|
||||
@@ -101,27 +101,6 @@ pub fn build_router() -> Router {
)
// Help chat (documentation-grounded Q&A)
.route("/api/v1/help/chat", post(handlers::help_chat::help_chat))
// CVE notification endpoints
.route(
"/api/v1/notifications",
get(handlers::notifications::list_notifications),
)
.route(
"/api/v1/notifications/count",
get(handlers::notifications::notification_count),
)
.route(
"/api/v1/notifications/read-all",
post(handlers::notifications::mark_all_read),
)
.route(
"/api/v1/notifications/{id}/read",
patch(handlers::notifications::mark_read),
)
.route(
"/api/v1/notifications/{id}/dismiss",
patch(handlers::notifications::dismiss_notification),
)
// Pentest API endpoints
.route(
"/api/v1/pentest/lookup-repo",

@@ -1,10 +1,8 @@
use std::sync::Arc;

use axum::http::HeaderValue;
use axum::{middleware, Extension};
use tokio::sync::RwLock;
use tower_http::cors::CorsLayer;
use tower_http::set_header::SetResponseHeaderLayer;
use tower_http::trace::TraceLayer;

use crate::agent::ComplianceAgent;

@@ -16,24 +14,7 @@ pub async fn start_api_server(agent: ComplianceAgent, port: u16) -> Result<(), A
let mut app = routes::build_router()
.layer(Extension(Arc::new(agent.clone())))
.layer(CorsLayer::permissive())
.layer(TraceLayer::new_for_http())
// Security headers (defense-in-depth, primary enforcement via Traefik)
.layer(SetResponseHeaderLayer::overriding(
axum::http::header::STRICT_TRANSPORT_SECURITY,
HeaderValue::from_static("max-age=31536000; includeSubDomains"),
))
.layer(SetResponseHeaderLayer::overriding(
axum::http::header::X_FRAME_OPTIONS,
HeaderValue::from_static("DENY"),
))
.layer(SetResponseHeaderLayer::overriding(
axum::http::header::X_CONTENT_TYPE_OPTIONS,
HeaderValue::from_static("nosniff"),
))
.layer(SetResponseHeaderLayer::overriding(
axum::http::header::REFERRER_POLICY,
HeaderValue::from_static("strict-origin-when-cross-origin"),
));
.layer(TraceLayer::new_for_http());

if let (Some(kc_url), Some(kc_realm)) =
(&agent.config.keycloak_url, &agent.config.keycloak_realm)

@@ -42,7 +42,7 @@ pub fn load_config() -> Result<AgentConfig, AgentError> {
.unwrap_or(3001),
scan_schedule: env_var_opt("SCAN_SCHEDULE").unwrap_or_else(|| "0 0 */6 * * *".to_string()),
cve_monitor_schedule: env_var_opt("CVE_MONITOR_SCHEDULE")
.unwrap_or_else(|| "0 0 * * * *".to_string()),
.unwrap_or_else(|| "0 0 0 * * *".to_string()),
git_clone_base_path: env_var_opt("GIT_CLONE_BASE_PATH")
.unwrap_or_else(|| "/tmp/compliance-scanner/repos".to_string()),
ssh_key_path: env_var_opt("SSH_KEY_PATH")

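Both schedule strings use the six-field cron syntax (second, minute, hour, day, month, weekday) that tokio-cron-scheduler parses: "0 0 * * * *" fires at the top of every hour, while "0 0 0 * * *" fires once a day at midnight. A rough sketch of how such a schedule is typically registered; the exact tokio-cron-scheduler 0.13 API shape is assumed here rather than taken from this diff:

```rust
// Sketch only: register a job for a six-field cron expression such as
// "0 0 0 * * *" (daily at midnight). API shape assumed for tokio-cron-scheduler 0.13.
use tokio_cron_scheduler::{Job, JobScheduler};

async fn schedule_cve_monitor(cron_expr: &str) -> Result<(), Box<dyn std::error::Error>> {
    let scheduler = JobScheduler::new().await?;
    let job = Job::new(cron_expr, |_job_id, _scheduler| {
        println!("CVE monitor tick");
    })?;
    scheduler.add(job).await?;
    scheduler.start().await?;
    Ok(())
}
```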
@@ -78,25 +78,6 @@ impl Database {
)
.await?;

// cve_notifications: unique cve_id + repo_id + package, status filter
self.cve_notifications()
.create_index(
IndexModel::builder()
.keys(
doc! { "cve_id": 1, "repo_id": 1, "package_name": 1, "package_version": 1 },
)
.options(IndexOptions::builder().unique(true).build())
.build(),
)
.await?;
self.cve_notifications()
.create_index(
IndexModel::builder()
.keys(doc! { "status": 1, "created_at": -1 })
.build(),
)
.await?;

// tracker_issues: unique finding_id
self.tracker_issues()
.create_index(

@@ -241,12 +222,6 @@ impl Database {
self.inner.collection("cve_alerts")
}

pub fn cve_notifications(
&self,
) -> Collection<compliance_core::models::notification::CveNotification> {
self.inner.collection("cve_notifications")
}

pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
self.inner.collection("tracker_issues")
}
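The unique compound index above is what makes notification deduplication cheap: both the pipeline and the CVE monitor elsewhere in this change set upsert with $setOnInsert against the same four fields, so a notification is created at most once per CVE, repo, package, and version. A condensed sketch of that pattern; the builder-style calls mirror ones already visible in this diff, while the CveNotification field names are assumed to match the BSON keys used in those filters:

```rust
// Sketch of the dedup upsert used by the pipeline and scheduler in this change set:
// $setOnInsert only writes when no document matches the unique key, so re-running
// the monitor never duplicates a notification.
use mongodb::bson::doc;

use compliance_core::models::notification::CveNotification;

async fn upsert_notification(
    db: &Database, // the crate's Database wrapper from this file
    n: &CveNotification,
) -> mongodb::error::Result<bool> {
    let filter = doc! {
        "cve_id": &n.cve_id,
        "repo_id": &n.repo_id,
        "package_name": &n.package_name,
        "package_version": &n.package_version,
    };
    let update = doc! { "$setOnInsert": mongodb::bson::to_bson(n).unwrap_or_default() };
    let result = db
        .cve_notifications()
        .update_one(filter, update)
        .upsert(true)
        .await?;
    Ok(result.upserted_id.is_some()) // true only when a new notification was inserted
}
```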
@@ -1,16 +0,0 @@
// Library entrypoint — re-exports for integration tests and the binary.

pub mod agent;
pub mod api;
pub mod config;
pub mod database;
pub mod error;
pub mod llm;
pub mod pentest;
pub mod pipeline;
pub mod rag;
pub mod scheduler;
pub mod ssh;
#[allow(dead_code)]
pub mod trackers;
pub mod webhooks;
@@ -1,10 +1,23 @@
use compliance_agent::{agent, api, config, database, scheduler, ssh, webhooks};
mod agent;
mod api;
pub(crate) mod config;
mod database;
mod error;
mod llm;
mod pentest;
mod pipeline;
mod rag;
mod scheduler;
mod ssh;
#[allow(dead_code)]
mod trackers;
mod webhooks;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
match dotenvy::dotenv() {
Ok(path) => eprintln!("[dotenv] Loaded from: {}", path.display()),
Err(_) => eprintln!("[dotenv] No .env file found, using environment variables"),
Err(e) => eprintln!("[dotenv] FAILED: {e}"),
}

let _telemetry_guard = compliance_core::telemetry::init_telemetry("compliance-agent");
@@ -315,67 +315,20 @@ impl PipelineOrchestrator {
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Persist CVE alerts and create notifications
|
||||
{
|
||||
use compliance_core::models::notification::{parse_severity, CveNotification};
|
||||
|
||||
let repo_name = repo.name.clone();
|
||||
let mut new_notif_count = 0u32;
|
||||
|
||||
for alert in &cve_alerts {
|
||||
// Upsert the alert
|
||||
let filter = doc! {
|
||||
"cve_id": &alert.cve_id,
|
||||
"repo_id": &alert.repo_id,
|
||||
};
|
||||
let update = mongodb::bson::to_document(alert)
|
||||
.map(|d| doc! { "$set": d })
|
||||
.unwrap_or_else(|_| doc! {});
|
||||
self.db
|
||||
.cve_alerts()
|
||||
.update_one(filter, update)
|
||||
.upsert(true)
|
||||
.await?;
|
||||
|
||||
// Create notification (dedup by cve_id + repo + package + version)
|
||||
let notif_filter = doc! {
|
||||
"cve_id": &alert.cve_id,
|
||||
"repo_id": &alert.repo_id,
|
||||
"package_name": &alert.affected_package,
|
||||
"package_version": &alert.affected_version,
|
||||
};
|
||||
let severity = parse_severity(alert.severity.as_deref(), alert.cvss_score);
|
||||
let mut notification = CveNotification::new(
|
||||
alert.cve_id.clone(),
|
||||
repo_id.clone(),
|
||||
repo_name.clone(),
|
||||
alert.affected_package.clone(),
|
||||
alert.affected_version.clone(),
|
||||
severity,
|
||||
);
|
||||
notification.cvss_score = alert.cvss_score;
|
||||
notification.summary = alert.summary.clone();
|
||||
notification.url = Some(format!("https://osv.dev/vulnerability/{}", alert.cve_id));
|
||||
|
||||
let notif_update = doc! {
|
||||
"$setOnInsert": mongodb::bson::to_bson(¬ification).unwrap_or_default()
|
||||
};
|
||||
if let Ok(result) = self
|
||||
.db
|
||||
.cve_notifications()
|
||||
.update_one(notif_filter, notif_update)
|
||||
.upsert(true)
|
||||
.await
|
||||
{
|
||||
if result.upserted_id.is_some() {
|
||||
new_notif_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if new_notif_count > 0 {
|
||||
tracing::info!("[{repo_id}] Created {new_notif_count} CVE notification(s)");
|
||||
}
|
||||
// Persist CVE alerts (upsert by cve_id + repo_id)
|
||||
for alert in &cve_alerts {
|
||||
let filter = doc! {
|
||||
"cve_id": &alert.cve_id,
|
||||
"repo_id": &alert.repo_id,
|
||||
};
|
||||
let update = mongodb::bson::to_document(alert)
|
||||
.map(|d| doc! { "$set": d })
|
||||
.unwrap_or_else(|_| doc! {});
|
||||
self.db
|
||||
.cve_alerts()
|
||||
.update_one(filter, update)
|
||||
.upsert(true)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Stage 6: Issue Creation
|
||||
|
||||
@@ -33,7 +33,6 @@ struct PatternRule {
file_extensions: Vec<String>,
}

#[allow(clippy::new_without_default)]
impl GdprPatternScanner {
pub fn new() -> Self {
let patterns = vec![

@@ -99,7 +98,6 @@ impl Scanner for GdprPatternScanner {
}
}

#[allow(clippy::new_without_default)]
impl OAuthPatternScanner {
pub fn new() -> Self {
let patterns = vec![
@@ -82,158 +82,24 @@ async fn scan_all_repos(agent: &ComplianceAgent) {
|
||||
}
|
||||
|
||||
async fn monitor_cves(agent: &ComplianceAgent) {
|
||||
use compliance_core::models::notification::{parse_severity, CveNotification};
|
||||
use compliance_core::models::SbomEntry;
|
||||
use futures_util::StreamExt;
|
||||
|
||||
// Fetch all SBOM entries grouped by repo
|
||||
// Re-scan all SBOM entries for new CVEs
|
||||
let cursor = match agent.db.sbom_entries().find(doc! {}).await {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
tracing::error!("CVE monitor: failed to list SBOM entries: {e}");
|
||||
tracing::error!("Failed to list SBOM entries for CVE monitoring: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let entries: Vec<SbomEntry> = cursor.filter_map(|r| async { r.ok() }).collect().await;
|
||||
|
||||
let entries: Vec<_> = cursor.filter_map(|r| async { r.ok() }).collect().await;
|
||||
|
||||
if entries.is_empty() {
|
||||
tracing::debug!("CVE monitor: no SBOM entries, skipping");
|
||||
return;
|
||||
}
|
||||
|
||||
tracing::info!(
|
||||
"CVE monitor: checking {} dependencies for new CVEs",
|
||||
entries.len()
|
||||
);
|
||||
|
||||
// Build a repo_id → repo_name lookup
|
||||
let repo_ids: std::collections::HashSet<String> =
|
||||
entries.iter().map(|e| e.repo_id.clone()).collect();
|
||||
let mut repo_names: std::collections::HashMap<String, String> =
|
||||
std::collections::HashMap::new();
|
||||
for rid in &repo_ids {
|
||||
if let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(rid) {
|
||||
if let Ok(Some(repo)) = agent.db.repositories().find_one(doc! { "_id": oid }).await {
|
||||
repo_names.insert(rid.clone(), repo.name.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Use the existing CveScanner to query OSV.dev
|
||||
let nvd_key = agent.config.nvd_api_key.as_ref().map(|k| {
|
||||
use secrecy::ExposeSecret;
|
||||
k.expose_secret().to_string()
|
||||
});
|
||||
let scanner = crate::pipeline::cve::CveScanner::new(
|
||||
agent.http.clone(),
|
||||
agent.config.searxng_url.clone(),
|
||||
nvd_key,
|
||||
);
|
||||
|
||||
// Group entries by repo for scanning
|
||||
let mut entries_by_repo: std::collections::HashMap<String, Vec<SbomEntry>> =
|
||||
std::collections::HashMap::new();
|
||||
for entry in entries {
|
||||
entries_by_repo
|
||||
.entry(entry.repo_id.clone())
|
||||
.or_default()
|
||||
.push(entry);
|
||||
}
|
||||
|
||||
let mut new_notifications = 0u32;
|
||||
|
||||
for (repo_id, mut repo_entries) in entries_by_repo {
|
||||
let repo_name = repo_names
|
||||
.get(&repo_id)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| repo_id.clone());
|
||||
|
||||
// Scan dependencies for CVEs
|
||||
let alerts = match scanner.scan_dependencies(&repo_id, &mut repo_entries).await {
|
||||
Ok(a) => a,
|
||||
Err(e) => {
|
||||
tracing::warn!("CVE monitor: scan failed for {repo_name}: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Upsert CVE alerts (existing logic)
|
||||
for alert in &alerts {
|
||||
let filter = doc! { "cve_id": &alert.cve_id, "repo_id": &alert.repo_id };
|
||||
let update = doc! { "$setOnInsert": mongodb::bson::to_bson(alert).unwrap_or_default() };
|
||||
let _ = agent
|
||||
.db
|
||||
.cve_alerts()
|
||||
.update_one(filter, update)
|
||||
.upsert(true)
|
||||
.await;
|
||||
}
|
||||
|
||||
// Update SBOM entries with discovered vulnerabilities
|
||||
for entry in &repo_entries {
|
||||
if entry.known_vulnerabilities.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if let Some(entry_id) = &entry.id {
|
||||
let _ = agent
|
||||
.db
|
||||
.sbom_entries()
|
||||
.update_one(
|
||||
doc! { "_id": entry_id },
|
||||
doc! { "$set": {
|
||||
"known_vulnerabilities": mongodb::bson::to_bson(&entry.known_vulnerabilities).unwrap_or_default(),
|
||||
"updated_at": mongodb::bson::DateTime::now(),
|
||||
}},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
// Create notifications for NEW CVEs (dedup against existing notifications)
|
||||
for alert in &alerts {
|
||||
let filter = doc! {
|
||||
"cve_id": &alert.cve_id,
|
||||
"repo_id": &alert.repo_id,
|
||||
"package_name": &alert.affected_package,
|
||||
"package_version": &alert.affected_version,
|
||||
};
|
||||
// Only insert if not already exists (upsert with $setOnInsert)
|
||||
let severity = parse_severity(alert.severity.as_deref(), alert.cvss_score);
|
||||
let mut notification = CveNotification::new(
|
||||
alert.cve_id.clone(),
|
||||
repo_id.clone(),
|
||||
repo_name.clone(),
|
||||
alert.affected_package.clone(),
|
||||
alert.affected_version.clone(),
|
||||
severity,
|
||||
);
|
||||
notification.cvss_score = alert.cvss_score;
|
||||
notification.summary = alert.summary.clone();
|
||||
notification.url = Some(format!("https://osv.dev/vulnerability/{}", alert.cve_id));
|
||||
|
||||
let update = doc! {
|
||||
"$setOnInsert": mongodb::bson::to_bson(¬ification).unwrap_or_default()
|
||||
};
|
||||
match agent
|
||||
.db
|
||||
.cve_notifications()
|
||||
.update_one(filter, update)
|
||||
.upsert(true)
|
||||
.await
|
||||
{
|
||||
Ok(result) if result.upserted_id.is_some() => {
|
||||
new_notifications += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::warn!("CVE monitor: failed to create notification: {e}");
|
||||
}
|
||||
_ => {} // Already exists
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if new_notifications > 0 {
|
||||
tracing::info!("CVE monitor: created {new_notifications} new notification(s)");
|
||||
} else {
|
||||
tracing::info!("CVE monitor: no new CVEs found");
|
||||
}
|
||||
tracing::info!("CVE monitor: checking {} dependencies", entries.len());
|
||||
// The actual CVE checking is handled by the CveScanner in the pipeline
|
||||
// This is a simplified version that just logs the activity
|
||||
}
|
||||
|
||||
@@ -1,165 +1,3 @@
|
||||
// Shared test harness for E2E / integration tests.
|
||||
// Shared test helpers for compliance-agent integration tests.
|
||||
//
|
||||
// Spins up the agent API server on a random port with an isolated test
|
||||
// database. Each test gets a fresh database that is dropped on cleanup.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_agent::agent::ComplianceAgent;
|
||||
use compliance_agent::api;
|
||||
use compliance_agent::database::Database;
|
||||
use compliance_core::AgentConfig;
|
||||
use secrecy::SecretString;
|
||||
|
||||
/// A running test server with a unique database.
|
||||
pub struct TestServer {
|
||||
pub base_url: String,
|
||||
pub client: reqwest::Client,
|
||||
db_name: String,
|
||||
mongodb_uri: String,
|
||||
}
|
||||
|
||||
impl TestServer {
|
||||
/// Start an agent API server on a random port with an isolated database.
|
||||
pub async fn start() -> Self {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
|
||||
// Unique database name per test run to avoid collisions
|
||||
let db_name = format!("test_{}", uuid::Uuid::new_v4().simple());
|
||||
|
||||
let db = Database::connect(&mongodb_uri, &db_name)
|
||||
.await
|
||||
.expect("Failed to connect to MongoDB — is it running?");
|
||||
db.ensure_indexes().await.expect("Failed to create indexes");
|
||||
|
||||
let config = AgentConfig {
|
||||
mongodb_uri: mongodb_uri.clone(),
|
||||
mongodb_database: db_name.clone(),
|
||||
litellm_url: std::env::var("TEST_LITELLM_URL")
|
||||
.unwrap_or_else(|_| "http://localhost:4000".into()),
|
||||
litellm_api_key: SecretString::from(String::new()),
|
||||
litellm_model: "gpt-4o".into(),
|
||||
litellm_embed_model: "text-embedding-3-small".into(),
|
||||
agent_port: 0, // not used — we bind ourselves
|
||||
scan_schedule: String::new(),
|
||||
cve_monitor_schedule: String::new(),
|
||||
git_clone_base_path: "/tmp/compliance-scanner-tests/repos".into(),
|
||||
ssh_key_path: "/tmp/compliance-scanner-tests/ssh/id_ed25519".into(),
|
||||
github_token: None,
|
||||
github_webhook_secret: None,
|
||||
gitlab_url: None,
|
||||
gitlab_token: None,
|
||||
gitlab_webhook_secret: None,
|
||||
jira_url: None,
|
||||
jira_email: None,
|
||||
jira_api_token: None,
|
||||
jira_project_key: None,
|
||||
searxng_url: None,
|
||||
nvd_api_key: None,
|
||||
keycloak_url: None,
|
||||
keycloak_realm: None,
|
||||
keycloak_admin_username: None,
|
||||
keycloak_admin_password: None,
|
||||
pentest_verification_email: None,
|
||||
pentest_imap_host: None,
|
||||
pentest_imap_port: None,
|
||||
pentest_imap_tls: false,
|
||||
pentest_imap_username: None,
|
||||
pentest_imap_password: None,
|
||||
};
|
||||
|
||||
let agent = ComplianceAgent::new(config, db);
|
||||
|
||||
// Build the router with the agent extension
|
||||
let app = api::routes::build_router()
|
||||
.layer(axum::extract::Extension(Arc::new(agent)))
|
||||
.layer(tower_http::cors::CorsLayer::permissive());
|
||||
|
||||
// Bind to port 0 to get a random available port
|
||||
let listener = tokio::net::TcpListener::bind("127.0.0.1:0")
|
||||
.await
|
||||
.expect("Failed to bind test server");
|
||||
let port = listener.local_addr().expect("no local addr").port();
|
||||
|
||||
tokio::spawn(async move {
|
||||
axum::serve(listener, app).await.ok();
|
||||
});
|
||||
|
||||
let base_url = format!("http://127.0.0.1:{port}");
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
.expect("Failed to build HTTP client");
|
||||
|
||||
// Wait for server to be ready
|
||||
for _ in 0..50 {
|
||||
if client
|
||||
.get(format!("{base_url}/api/v1/health"))
|
||||
.send()
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
break;
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||
}
|
||||
|
||||
Self {
|
||||
base_url,
|
||||
client,
|
||||
db_name,
|
||||
mongodb_uri,
|
||||
}
|
||||
}
|
||||
|
||||
/// GET helper
|
||||
pub async fn get(&self, path: &str) -> reqwest::Response {
|
||||
self.client
|
||||
.get(format!("{}{path}", self.base_url))
|
||||
.send()
|
||||
.await
|
||||
.expect("GET request failed")
|
||||
}
|
||||
|
||||
/// POST helper with JSON body
|
||||
pub async fn post(&self, path: &str, body: &serde_json::Value) -> reqwest::Response {
|
||||
self.client
|
||||
.post(format!("{}{path}", self.base_url))
|
||||
.json(body)
|
||||
.send()
|
||||
.await
|
||||
.expect("POST request failed")
|
||||
}
|
||||
|
||||
/// PATCH helper with JSON body
|
||||
pub async fn patch(&self, path: &str, body: &serde_json::Value) -> reqwest::Response {
|
||||
self.client
|
||||
.patch(format!("{}{path}", self.base_url))
|
||||
.json(body)
|
||||
.send()
|
||||
.await
|
||||
.expect("PATCH request failed")
|
||||
}
|
||||
|
||||
/// DELETE helper
|
||||
pub async fn delete(&self, path: &str) -> reqwest::Response {
|
||||
self.client
|
||||
.delete(format!("{}{path}", self.base_url))
|
||||
.send()
|
||||
.await
|
||||
.expect("DELETE request failed")
|
||||
}
|
||||
|
||||
/// Get the unique database name for direct MongoDB access in tests.
|
||||
pub fn db_name(&self) -> &str {
|
||||
&self.db_name
|
||||
}
|
||||
|
||||
/// Drop the test database on cleanup
|
||||
pub async fn cleanup(&self) {
|
||||
if let Ok(client) = mongodb::Client::with_uri_str(&self.mongodb_uri).await {
|
||||
client.database(&self.db_name).drop().await.ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add database mocks, fixtures, and test utilities here.
|
||||
|
||||
@@ -1,7 +0,0 @@
// E2E test entry point.
//
// Run with: cargo test -p compliance-agent --test e2e
// Requires: MongoDB running (set TEST_MONGODB_URI if not default)

mod common;
mod integration;
@@ -1,221 +0,0 @@
|
||||
use crate::common::TestServer;
|
||||
use serde_json::json;
|
||||
|
||||
/// Insert a DAST target directly into MongoDB linked to a repo.
|
||||
async fn insert_dast_target(server: &TestServer, repo_id: &str, name: &str) -> String {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
|
||||
let result = db
|
||||
.collection::<mongodb::bson::Document>("dast_targets")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"name": name,
|
||||
"base_url": format!("https://{name}.example.com"),
|
||||
"target_type": "webapp",
|
||||
"repo_id": repo_id,
|
||||
"rate_limit": 10,
|
||||
"allow_destructive": false,
|
||||
"created_at": mongodb::bson::DateTime::now(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
result.inserted_id.as_object_id().unwrap().to_hex()
|
||||
}
|
||||
|
||||
/// Insert a pentest session linked to a target.
|
||||
async fn insert_pentest_session(server: &TestServer, target_id: &str, repo_id: &str) -> String {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
|
||||
let result = db
|
||||
.collection::<mongodb::bson::Document>("pentest_sessions")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"target_id": target_id,
|
||||
"repo_id": repo_id,
|
||||
"strategy": "comprehensive",
|
||||
"status": "completed",
|
||||
"findings_count": 1_i32,
|
||||
"exploitable_count": 0_i32,
|
||||
"created_at": mongodb::bson::DateTime::now(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
result.inserted_id.as_object_id().unwrap().to_hex()
|
||||
}
|
||||
|
||||
/// Insert an attack chain node linked to a session.
|
||||
async fn insert_attack_node(server: &TestServer, session_id: &str) {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
|
||||
db.collection::<mongodb::bson::Document>("attack_chain_nodes")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"session_id": session_id,
|
||||
"node_id": "node-1",
|
||||
"tool_name": "recon",
|
||||
"status": "completed",
|
||||
"created_at": mongodb::bson::DateTime::now(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Insert a DAST finding linked to a target.
|
||||
async fn insert_dast_finding(server: &TestServer, target_id: &str, session_id: &str) {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
|
||||
db.collection::<mongodb::bson::Document>("dast_findings")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"scan_run_id": "run-1",
|
||||
"target_id": target_id,
|
||||
"vuln_type": "xss",
|
||||
"title": "Reflected XSS",
|
||||
"description": "XSS in search param",
|
||||
"severity": "high",
|
||||
"endpoint": "https://example.com/search",
|
||||
"method": "GET",
|
||||
"exploitable": true,
|
||||
"evidence": [],
|
||||
"session_id": session_id,
|
||||
"created_at": mongodb::bson::DateTime::now(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Helper to count documents in a collection
|
||||
async fn count_docs(server: &TestServer, collection: &str) -> u64 {
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
db.collection::<mongodb::bson::Document>(collection)
|
||||
.count_documents(mongodb::bson::doc! {})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn delete_repo_cascades_to_dast_and_pentest_data() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
// Create a repo
|
||||
let resp = server
|
||||
.post(
|
||||
"/api/v1/repositories",
|
||||
&json!({
|
||||
"name": "cascade-test",
|
||||
"git_url": "https://github.com/example/cascade-test.git",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let repo_id = body["data"]["id"].as_str().unwrap().to_string();
|
||||
|
||||
// Insert DAST target linked to repo
|
||||
let target_id = insert_dast_target(&server, &repo_id, "cascade-target").await;
|
||||
|
||||
// Insert pentest session linked to target
|
||||
let session_id = insert_pentest_session(&server, &target_id, &repo_id).await;
|
||||
|
||||
// Insert downstream data
|
||||
insert_attack_node(&server, &session_id).await;
|
||||
insert_dast_finding(&server, &target_id, &session_id).await;
|
||||
|
||||
// Verify data exists
|
||||
assert_eq!(count_docs(&server, "dast_targets").await, 1);
|
||||
assert_eq!(count_docs(&server, "pentest_sessions").await, 1);
|
||||
assert_eq!(count_docs(&server, "attack_chain_nodes").await, 1);
|
||||
assert_eq!(count_docs(&server, "dast_findings").await, 1);
|
||||
|
||||
// Delete the repo
|
||||
let resp = server
|
||||
.delete(&format!("/api/v1/repositories/{repo_id}"))
|
||||
.await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
// All downstream data should be gone
|
||||
assert_eq!(count_docs(&server, "dast_targets").await, 0);
|
||||
assert_eq!(count_docs(&server, "pentest_sessions").await, 0);
|
||||
assert_eq!(count_docs(&server, "attack_chain_nodes").await, 0);
|
||||
assert_eq!(count_docs(&server, "dast_findings").await, 0);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn delete_repo_cascades_sast_findings_and_sbom() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
// Create a repo
|
||||
let resp = server
|
||||
.post(
|
||||
"/api/v1/repositories",
|
||||
&json!({
|
||||
"name": "sast-cascade",
|
||||
"git_url": "https://github.com/example/sast-cascade.git",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let repo_id = body["data"]["id"].as_str().unwrap().to_string();
|
||||
|
||||
// Insert SAST finding and SBOM entry
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
let db = client.database(&server.db_name());
|
||||
let now = mongodb::bson::DateTime::now();
|
||||
|
||||
db.collection::<mongodb::bson::Document>("findings")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"repo_id": &repo_id,
|
||||
"fingerprint": "fp-test-1",
|
||||
"scanner": "semgrep",
|
||||
"scan_type": "sast",
|
||||
"title": "SQL Injection",
|
||||
"description": "desc",
|
||||
"severity": "critical",
|
||||
"status": "open",
|
||||
"created_at": now,
|
||||
"updated_at": now,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.collection::<mongodb::bson::Document>("sbom_entries")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"repo_id": &repo_id,
|
||||
"name": "lodash",
|
||||
"version": "4.17.20",
|
||||
"package_manager": "npm",
|
||||
"known_vulnerabilities": [],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(count_docs(&server, "findings").await, 1);
|
||||
assert_eq!(count_docs(&server, "sbom_entries").await, 1);
|
||||
|
||||
// Delete repo
|
||||
server
|
||||
.delete(&format!("/api/v1/repositories/{repo_id}"))
|
||||
.await;
|
||||
|
||||
// Both should be gone
|
||||
assert_eq!(count_docs(&server, "findings").await, 0);
|
||||
assert_eq!(count_docs(&server, "sbom_entries").await, 0);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
use crate::common::TestServer;
|
||||
use serde_json::json;
|
||||
|
||||
#[tokio::test]
|
||||
async fn add_and_list_dast_targets() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
// Initially empty
|
||||
let resp = server.get("/api/v1/dast/targets").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["data"].as_array().unwrap().len(), 0);
|
||||
|
||||
// Add a target
|
||||
let resp = server
|
||||
.post(
|
||||
"/api/v1/dast/targets",
|
||||
&json!({
|
||||
"name": "test-app",
|
||||
"base_url": "https://test-app.example.com",
|
||||
"target_type": "webapp",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
// List should return 1
|
||||
let resp = server.get("/api/v1/dast/targets").await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let targets = body["data"].as_array().unwrap();
|
||||
assert_eq!(targets.len(), 1);
|
||||
assert_eq!(targets[0]["name"], "test-app");
|
||||
assert_eq!(targets[0]["base_url"], "https://test-app.example.com");
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_dast_findings_empty() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
let resp = server.get("/api/v1/dast/findings").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["data"].as_array().unwrap().len(), 0);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
@@ -1,144 +0,0 @@
|
||||
use crate::common::TestServer;
|
||||
use serde_json::json;
|
||||
|
||||
/// Helper: insert a finding directly via MongoDB for testing query endpoints.
|
||||
async fn insert_finding(server: &TestServer, repo_id: &str, title: &str, severity: &str) {
|
||||
// We insert via the agent's DB by posting to the internal test path.
|
||||
// Since there's no direct "create finding" API, we use MongoDB directly.
|
||||
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
|
||||
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
|
||||
|
||||
// Extract the database name from the server's unique DB
|
||||
// We'll use the agent's internal DB through the stats endpoint to verify
|
||||
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
|
||||
|
||||
// Get the DB name from the test server by parsing the health response
|
||||
// For now, we use a direct insert approach
|
||||
let db = client.database(&server.db_name());
|
||||
|
||||
let now = mongodb::bson::DateTime::now();
|
||||
db.collection::<mongodb::bson::Document>("findings")
|
||||
.insert_one(mongodb::bson::doc! {
|
||||
"repo_id": repo_id,
|
||||
"fingerprint": format!("fp-{title}-{severity}"),
|
||||
"scanner": "test-scanner",
|
||||
"scan_type": "sast",
|
||||
"title": title,
|
||||
"description": format!("Test finding: {title}"),
|
||||
"severity": severity,
|
||||
"status": "open",
|
||||
"created_at": now,
|
||||
"updated_at": now,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_findings_empty() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
let resp = server.get("/api/v1/findings").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["data"].as_array().unwrap().len(), 0);
|
||||
assert_eq!(body["total"], 0);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn list_findings_with_data() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
insert_finding(&server, "repo1", "SQL Injection", "critical").await;
|
||||
insert_finding(&server, "repo1", "XSS", "high").await;
|
||||
insert_finding(&server, "repo2", "Info Leak", "low").await;
|
||||
|
||||
let resp = server.get("/api/v1/findings").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["total"], 3);
|
||||
|
||||
// Filter by severity
|
||||
let resp = server.get("/api/v1/findings?severity=critical").await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["total"], 1);
|
||||
assert_eq!(body["data"][0]["title"], "SQL Injection");
|
||||
|
||||
// Filter by repo
|
||||
let resp = server.get("/api/v1/findings?repo_id=repo1").await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["total"], 2);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn update_finding_status() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
insert_finding(&server, "repo1", "Test Bug", "medium").await;
|
||||
|
||||
// Get the finding ID
|
||||
let resp = server.get("/api/v1/findings").await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let finding_id = body["data"][0]["_id"]["$oid"].as_str().unwrap();
|
||||
|
||||
// Update status to resolved
|
||||
let resp = server
|
||||
.patch(
|
||||
&format!("/api/v1/findings/{finding_id}/status"),
|
||||
&json!({ "status": "resolved" }),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
// Verify it's updated
|
||||
let resp = server.get(&format!("/api/v1/findings/{finding_id}")).await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["data"]["status"], "resolved");
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn bulk_update_finding_status() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
insert_finding(&server, "repo1", "Bug A", "high").await;
|
||||
insert_finding(&server, "repo1", "Bug B", "high").await;
|
||||
|
||||
// Get both finding IDs
|
||||
let resp = server.get("/api/v1/findings").await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let ids: Vec<String> = body["data"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|f| f["_id"]["$oid"].as_str().unwrap().to_string())
|
||||
.collect();
|
||||
|
||||
// Bulk update
|
||||
let resp = server
|
||||
.patch(
|
||||
"/api/v1/findings/bulk-status",
|
||||
&json!({
|
||||
"ids": ids,
|
||||
"status": "false_positive"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
// Verify both are updated
|
||||
for id in &ids {
|
||||
let resp = server.get(&format!("/api/v1/findings/{id}")).await;
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["data"]["status"], "false_positive");
|
||||
}
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
use crate::common::TestServer;
|
||||
|
||||
#[tokio::test]
|
||||
async fn health_endpoint_returns_ok() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
let resp = server.get("/api/v1/health").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
assert_eq!(body["status"], "ok");
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn stats_overview_returns_zeroes_on_empty_db() {
|
||||
let server = TestServer::start().await;
|
||||
|
||||
let resp = server.get("/api/v1/stats/overview").await;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
let body: serde_json::Value = resp.json().await.unwrap();
|
||||
let data = &body["data"];
|
||||
assert_eq!(data["repositories"], 0);
|
||||
assert_eq!(data["total_findings"], 0);
|
||||
|
||||
server.cleanup().await;
|
||||
}
|
||||
@@ -1,6 +0,0 @@
mod cascade_delete;
mod dast;
mod findings;
mod health;
mod repositories;
mod stats;
@@ -1,110 +0,0 @@
use crate::common::TestServer;
use serde_json::json;

#[tokio::test]
async fn add_and_list_repository() {
    let server = TestServer::start().await;

    // Initially empty
    let resp = server.get("/api/v1/repositories").await;
    assert_eq!(resp.status(), 200);
    let body: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(body["data"].as_array().unwrap().len(), 0);

    // Add a repository
    let resp = server
        .post(
            "/api/v1/repositories",
            &json!({
                "name": "test-repo",
                "git_url": "https://github.com/example/test-repo.git",
            }),
        )
        .await;
    assert_eq!(resp.status(), 200);
    let body: serde_json::Value = resp.json().await.unwrap();
    let repo_id = body["data"]["id"].as_str().unwrap().to_string();
    assert!(!repo_id.is_empty());

    // List should now return 1
    let resp = server.get("/api/v1/repositories").await;
    let body: serde_json::Value = resp.json().await.unwrap();
    let repos = body["data"].as_array().unwrap();
    assert_eq!(repos.len(), 1);
    assert_eq!(repos[0]["name"], "test-repo");

    server.cleanup().await;
}

#[tokio::test]
async fn add_duplicate_repository_fails() {
    let server = TestServer::start().await;

    let payload = json!({
        "name": "dup-repo",
        "git_url": "https://github.com/example/dup-repo.git",
    });

    // First add succeeds
    let resp = server.post("/api/v1/repositories", &payload).await;
    assert_eq!(resp.status(), 200);

    // Second add with same git_url should fail (unique index)
    let resp = server.post("/api/v1/repositories", &payload).await;
    assert_ne!(resp.status(), 200);

    server.cleanup().await;
}

#[tokio::test]
async fn delete_repository() {
    let server = TestServer::start().await;

    // Add a repo
    let resp = server
        .post(
            "/api/v1/repositories",
            &json!({
                "name": "to-delete",
                "git_url": "https://github.com/example/to-delete.git",
            }),
        )
        .await;
    let body: serde_json::Value = resp.json().await.unwrap();
    let repo_id = body["data"]["id"].as_str().unwrap();

    // Delete it
    let resp = server
        .delete(&format!("/api/v1/repositories/{repo_id}"))
        .await;
    assert_eq!(resp.status(), 200);

    // List should be empty again
    let resp = server.get("/api/v1/repositories").await;
    let body: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(body["data"].as_array().unwrap().len(), 0);

    server.cleanup().await;
}

#[tokio::test]
async fn delete_nonexistent_repository_returns_404() {
    let server = TestServer::start().await;

    let resp = server
        .delete("/api/v1/repositories/000000000000000000000000")
        .await;
    assert_eq!(resp.status(), 404);

    server.cleanup().await;
}

#[tokio::test]
async fn delete_invalid_id_returns_400() {
    let server = TestServer::start().await;

    let resp = server.delete("/api/v1/repositories/not-a-valid-id").await;
    assert_eq!(resp.status(), 400);

    server.cleanup().await;
}
@@ -1,111 +0,0 @@
use crate::common::TestServer;
use serde_json::json;

#[tokio::test]
async fn stats_overview_reflects_inserted_data() {
    let server = TestServer::start().await;

    // Add a repo
    server
        .post(
            "/api/v1/repositories",
            &json!({
                "name": "stats-repo",
                "git_url": "https://github.com/example/stats-repo.git",
            }),
        )
        .await;

    // Insert findings directly
    let mongodb_uri = std::env::var("TEST_MONGODB_URI")
        .unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
    let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
    let db = client.database(&server.db_name());
    let now = mongodb::bson::DateTime::now();

    for (title, severity) in [
        ("Critical Bug", "critical"),
        ("High Bug", "high"),
        ("Medium Bug", "medium"),
        ("Low Bug", "low"),
    ] {
        db.collection::<mongodb::bson::Document>("findings")
            .insert_one(mongodb::bson::doc! {
                "repo_id": "test-repo-id",
                "fingerprint": format!("fp-{title}"),
                "scanner": "test",
                "scan_type": "sast",
                "title": title,
                "description": "desc",
                "severity": severity,
                "status": "open",
                "created_at": now,
                "updated_at": now,
            })
            .await
            .unwrap();
    }

    let resp = server.get("/api/v1/stats/overview").await;
    assert_eq!(resp.status(), 200);

    let body: serde_json::Value = resp.json().await.unwrap();
    let data = &body["data"];
    assert_eq!(data["repositories"], 1);
    assert_eq!(data["total_findings"], 4);
    assert_eq!(data["critical"], 1);
    assert_eq!(data["high"], 1);

    server.cleanup().await;
}

#[tokio::test]
async fn stats_update_after_finding_status_change() {
    let server = TestServer::start().await;

    // Insert a finding
    let mongodb_uri = std::env::var("TEST_MONGODB_URI")
        .unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
    let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
    let db = client.database(&server.db_name());
    let now = mongodb::bson::DateTime::now();

    let result = db
        .collection::<mongodb::bson::Document>("findings")
        .insert_one(mongodb::bson::doc! {
            "repo_id": "repo-1",
            "fingerprint": "fp-stats-test",
            "scanner": "test",
            "scan_type": "sast",
            "title": "Stats Test Finding",
            "description": "desc",
            "severity": "high",
            "status": "open",
            "created_at": now,
            "updated_at": now,
        })
        .await
        .unwrap();
    let finding_id = result.inserted_id.as_object_id().unwrap().to_hex();

    // Stats should show 1 finding
    let resp = server.get("/api/v1/stats/overview").await;
    let body: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(body["data"]["total_findings"], 1);

    // Mark it as resolved
    server
        .patch(
            &format!("/api/v1/findings/{finding_id}/status"),
            &json!({ "status": "resolved" }),
        )
        .await;

    // The finding still exists (status changed, not deleted)
    let resp = server.get("/api/v1/stats/overview").await;
    let body: serde_json::Value = resp.json().await.unwrap();
    // total_findings counts all findings regardless of status
    assert_eq!(body["data"]["total_findings"], 1);

    server.cleanup().await;
}
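A hedged sketch of the overview payload shape these assertions rely on (field names taken from the tests above; the values are illustrative):

let overview = serde_json::json!({
    "data": { "repositories": 1, "total_findings": 4, "critical": 1, "high": 1 }
});
assert_eq!(overview["data"]["total_findings"], 4);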
@@ -1,9 +1,4 @@
// E2E / Integration tests for the compliance-agent API.
// Integration tests for the compliance-agent crate.
//
// These tests require a running MongoDB instance. Set TEST_MONGODB_URI
// if it's not at the default `mongodb://root:example@localhost:27017`.
//
// Run with: cargo test -p compliance-agent --test e2e
// Or nightly: (via CI with MongoDB service container)

mod api;
// Add tests that exercise the full pipeline, API handlers,
// and cross-module interactions here.

@@ -7,7 +7,6 @@ pub mod finding;
pub mod graph;
pub mod issue;
pub mod mcp;
pub mod notification;
pub mod pentest;
pub mod repository;
pub mod sbom;
@@ -28,7 +27,6 @@ pub use graph::{
};
pub use issue::{IssueStatus, TrackerIssue, TrackerType};
pub use mcp::{McpServerConfig, McpServerStatus, McpTransport};
pub use notification::{CveNotification, NotificationSeverity, NotificationStatus};
pub use pentest::{
    AttackChainNode, AttackNodeStatus, AuthMode, CodeContextHint, Environment, IdentityProvider,
    PentestAuthConfig, PentestConfig, PentestEvent, PentestMessage, PentestSession, PentestStats,

@@ -1,103 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// Status of a CVE notification
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum NotificationStatus {
    /// Newly created, not yet seen by the user
    New,
    /// User has seen it (e.g., opened the notification panel)
    Read,
    /// User has explicitly acknowledged/dismissed it
    Dismissed,
}

/// Severity level for notification filtering
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "lowercase")]
pub enum NotificationSeverity {
    Low,
    Medium,
    High,
    Critical,
}

/// A notification about a newly discovered CVE affecting a tracked dependency.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CveNotification {
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<bson::oid::ObjectId>,
    /// The CVE/GHSA identifier
    pub cve_id: String,
    /// Repository where the vulnerable dependency is used
    pub repo_id: String,
    /// Repository name (denormalized for display)
    pub repo_name: String,
    /// Affected package name
    pub package_name: String,
    /// Affected version
    pub package_version: String,
    /// Human-readable severity
    pub severity: NotificationSeverity,
    /// CVSS score if available
    pub cvss_score: Option<f64>,
    /// Short summary of the vulnerability
    pub summary: Option<String>,
    /// Link to vulnerability details
    pub url: Option<String>,
    /// Notification lifecycle status
    pub status: NotificationStatus,
    /// When the CVE was first detected for this dependency
    #[serde(with = "super::serde_helpers::bson_datetime")]
    pub created_at: DateTime<Utc>,
    /// When the user last interacted with this notification
    pub read_at: Option<DateTime<Utc>>,
}

impl CveNotification {
    pub fn new(
        cve_id: String,
        repo_id: String,
        repo_name: String,
        package_name: String,
        package_version: String,
        severity: NotificationSeverity,
    ) -> Self {
        Self {
            id: None,
            cve_id,
            repo_id,
            repo_name,
            package_name,
            package_version,
            severity,
            cvss_score: None,
            summary: None,
            url: None,
            status: NotificationStatus::New,
            created_at: Utc::now(),
            read_at: None,
        }
    }
}

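A minimal usage sketch of the constructor above (the CVE and package values are illustrative, not from the source):

let mut n = CveNotification::new(
    "CVE-2024-12345".into(),
    "repo-1".into(),
    "example-repo".into(),
    "openssl".into(),
    "1.1.1w".into(),
    NotificationSeverity::High,
);
n.cvss_score = Some(8.1);
assert_eq!(n.status, NotificationStatus::New);
assert!(n.read_at.is_none());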
/// Map an OSV/NVD severity string to our notification severity
pub fn parse_severity(s: Option<&str>, cvss: Option<f64>) -> NotificationSeverity {
    // Prefer CVSS score if available
    if let Some(score) = cvss {
        return match score {
            s if s >= 9.0 => NotificationSeverity::Critical,
            s if s >= 7.0 => NotificationSeverity::High,
            s if s >= 4.0 => NotificationSeverity::Medium,
            _ => NotificationSeverity::Low,
        };
    }
    // Fall back to string severity
    match s.map(|s| s.to_uppercase()).as_deref() {
        Some("CRITICAL") => NotificationSeverity::Critical,
        Some("HIGH") => NotificationSeverity::High,
        Some("MODERATE" | "MEDIUM") => NotificationSeverity::Medium,
        _ => NotificationSeverity::Low,
    }
}
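A short sketch of how `parse_severity` behaves under the thresholds above (these assertions are illustrative, not part of the original tests):

assert_eq!(parse_severity(None, Some(9.8)), NotificationSeverity::Critical);
assert_eq!(parse_severity(Some("MODERATE"), None), NotificationSeverity::Medium);
// The CVSS score takes precedence over the string when both are present
assert_eq!(parse_severity(Some("LOW"), Some(7.5)), NotificationSeverity::High);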
@@ -3847,45 +3847,3 @@ tbody tr:last-child td {
.help-chat-send:not(:disabled):hover {
    background: var(--accent-hover);
}

/* ═══════════════════════════════════════════════════════════════
   NOTIFICATION BELL — CVE alert dropdown
   ═══════════════════════════════════════════════════════════════ */
.notification-bell-wrapper { position: fixed; top: 16px; right: 28px; z-index: 48; }
.notification-bell-btn { position: relative; background: var(--bg-elevated); border: 1px solid var(--border); border-radius: 10px; padding: 8px 10px; color: var(--text-secondary); cursor: pointer; display: flex; align-items: center; transition: color 0.15s, border-color 0.15s; }
.notification-bell-btn:hover { color: var(--text-primary); border-color: var(--border-bright); }
.notification-badge { position: absolute; top: -4px; right: -4px; background: var(--danger); color: #fff; font-size: 10px; font-weight: 700; min-width: 18px; height: 18px; border-radius: 9px; display: flex; align-items: center; justify-content: center; padding: 0 4px; font-family: 'Outfit', sans-serif; }
.notification-panel { position: absolute; top: 44px; right: 0; width: 380px; max-height: 480px; background: var(--bg-secondary); border: 1px solid var(--border-bright); border-radius: 12px; overflow: hidden; box-shadow: 0 12px 48px rgba(0,0,0,0.5); display: flex; flex-direction: column; }
.notification-panel-header { display: flex; align-items: center; justify-content: space-between; padding: 12px 16px; border-bottom: 1px solid var(--border); font-family: 'Outfit', sans-serif; font-weight: 600; font-size: 14px; color: var(--text-primary); }
.notification-close-btn { background: none; border: none; color: var(--text-secondary); cursor: pointer; padding: 2px; }
.notification-panel-body { overflow-y: auto; flex: 1; padding: 8px; }
.notification-loading, .notification-empty { display: flex; flex-direction: column; align-items: center; justify-content: center; padding: 32px 16px; color: var(--text-secondary); font-size: 13px; gap: 8px; }
.notification-item { padding: 10px 12px; border-radius: 8px; margin-bottom: 4px; background: var(--bg-card); border: 1px solid var(--border); transition: border-color 0.15s; }
.notification-item:hover { border-color: var(--border-bright); }
.notification-item-header { display: flex; align-items: center; gap: 8px; margin-bottom: 4px; }
.notification-sev { font-size: 10px; font-weight: 700; padding: 2px 6px; border-radius: 4px; text-transform: uppercase; letter-spacing: 0.5px; font-family: 'Outfit', sans-serif; }
.notification-sev.sev-critical { background: var(--danger-bg); color: var(--danger); }
.notification-sev.sev-high { background: rgba(255,140,0,0.12); color: #ff8c00; }
.notification-sev.sev-medium { background: var(--warning-bg); color: var(--warning); }
.notification-sev.sev-low { background: rgba(0,200,255,0.08); color: var(--accent); }
.notification-cve-id { font-size: 12px; font-weight: 600; color: var(--text-primary); font-family: 'JetBrains Mono', monospace; }
.notification-cve-id a { color: var(--accent); text-decoration: none; }
.notification-cve-id a:hover { text-decoration: underline; }
.notification-cvss { font-size: 10px; color: var(--text-secondary); margin-left: auto; font-family: 'JetBrains Mono', monospace; }
.notification-dismiss-btn { background: none; border: none; color: var(--text-tertiary); cursor: pointer; padding: 2px; margin-left: 4px; }
.notification-dismiss-btn:hover { color: var(--danger); }
.notification-item-pkg { font-size: 12px; color: var(--text-primary); font-family: 'JetBrains Mono', monospace; }
.notification-item-repo { font-size: 11px; color: var(--text-secondary); margin-bottom: 4px; }
.notification-item-summary { font-size: 11px; color: var(--text-secondary); line-height: 1.4; display: -webkit-box; -webkit-line-clamp: 2; -webkit-box-orient: vertical; overflow: hidden; }

/* ═══════════════════════════════════════════════════════════════
   COPY BUTTON — Reusable clipboard copy component
   ═══════════════════════════════════════════════════════════════ */
.copy-btn { background: none; border: 1px solid var(--border); border-radius: 6px; padding: 5px 7px; color: var(--text-secondary); cursor: pointer; display: inline-flex; align-items: center; transition: color 0.15s, border-color 0.15s, background 0.15s; flex-shrink: 0; }
.copy-btn:hover { color: var(--accent); border-color: var(--accent); background: var(--accent-muted); }
.copy-btn-sm { padding: 3px 5px; border-radius: 4px; }
/* Copyable inline field pattern: value + copy button side by side */
.copyable { display: flex; align-items: center; gap: 6px; }
.copyable code, .copyable .mono { flex: 1; min-width: 0; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
.code-snippet-wrapper { position: relative; }
.code-snippet-header { display: flex; align-items: center; justify-content: space-between; margin-bottom: 4px; gap: 8px; }

@@ -2,7 +2,6 @@ use dioxus::prelude::*;

use crate::app::Route;
use crate::components::help_chat::HelpChat;
use crate::components::notification_bell::NotificationBell;
use crate::components::sidebar::Sidebar;
use crate::components::toast::{ToastContainer, Toasts};
use crate::infrastructure::auth_check::check_auth;
@@ -22,7 +21,6 @@ pub fn AppShell() -> Element {
        main { class: "main-content",
            Outlet::<Route> {}
        }
        NotificationBell {}
        ToastContainer {}
        HelpChat {}
    }

@@ -1,7 +1,5 @@
use dioxus::prelude::*;

use crate::components::copy_button::CopyButton;

#[component]
pub fn CodeSnippet(
    code: String,
@@ -9,18 +7,15 @@ pub fn CodeSnippet(
    #[props(default)] line_number: u32,
) -> Element {
    rsx! {
        div { class: "code-snippet-wrapper",
            div { class: "code-snippet-header",
                if !file_path.is_empty() {
                    span {
                        style: "font-size: 12px; color: var(--text-secondary); font-family: monospace;",
                        "{file_path}"
                        if line_number > 0 {
                            ":{line_number}"
                        }
        div {
            if !file_path.is_empty() {
                div {
                    style: "font-size: 12px; color: var(--text-secondary); margin-bottom: 4px; font-family: monospace;",
                    "{file_path}"
                    if line_number > 0 {
                        ":{line_number}"
                    }
                }
                CopyButton { value: code.clone(), small: true }
            }
            pre { class: "code-block", "{code}" }
        }

@@ -1,49 +0,0 @@
use dioxus::prelude::*;
use dioxus_free_icons::icons::bs_icons::*;
use dioxus_free_icons::Icon;

/// A small copy-to-clipboard button that shows a checkmark after copying.
///
/// Usage: `CopyButton { value: "text to copy" }`
#[component]
pub fn CopyButton(value: String, #[props(default = false)] small: bool) -> Element {
    let mut copied = use_signal(|| false);

    let size = if small { 12 } else { 14 };
    let class = if small {
        "copy-btn copy-btn-sm"
    } else {
        "copy-btn"
    };

    rsx! {
        button {
            class: class,
            title: if copied() { "Copied!" } else { "Copy to clipboard" },
            onclick: move |_| {
                let val = value.clone();
                // Escape for JS single-quoted string
                let escaped = val
                    .replace('\\', "\\\\")
                    .replace('\'', "\\'")
                    .replace('\n', "\\n")
                    .replace('\r', "\\r");
                let js = format!("navigator.clipboard.writeText('{escaped}')");
                document::eval(&js);
                copied.set(true);
                spawn(async move {
                    #[cfg(feature = "web")]
                    gloo_timers::future::TimeoutFuture::new(2000).await;
                    #[cfg(not(feature = "web"))]
                    tokio::time::sleep(std::time::Duration::from_secs(2)).await;
                    copied.set(false);
                });
            },
            if copied() {
                Icon { icon: BsCheckLg, width: size, height: size }
            } else {
                Icon { icon: BsClipboard, width: size, height: size }
            }
        }
    }
}
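A hedged sketch of the JS-string escaping used in the `onclick` above: a value containing quotes and a newline becomes a safe single-quoted literal (the sample value is illustrative):

let val = "let s = 'hi';\n";
let escaped = val
    .replace('\\', "\\\\")
    .replace('\'', "\\'")
    .replace('\n', "\\n")
    .replace('\r', "\\r");
// Quotes and the newline are now escaped for embedding in a JS string
assert_eq!(escaped, "let s = \\'hi\\';\\n");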
@@ -2,10 +2,8 @@ pub mod app_shell;
pub mod attack_chain;
pub mod code_inspector;
pub mod code_snippet;
pub mod copy_button;
pub mod file_tree;
pub mod help_chat;
pub mod notification_bell;
pub mod page_header;
pub mod pagination;
pub mod pentest_wizard;

@@ -1,155 +0,0 @@
use dioxus::prelude::*;
use dioxus_free_icons::icons::bs_icons::*;
use dioxus_free_icons::Icon;

use crate::infrastructure::notifications::{
    dismiss_notification, fetch_notification_count, fetch_notifications,
    mark_all_notifications_read,
};

#[component]
pub fn NotificationBell() -> Element {
    let mut is_open = use_signal(|| false);
    let mut count = use_signal(|| 0u64);
    let mut notifications = use_signal(Vec::new);
    let mut is_loading = use_signal(|| false);

    // Poll notification count every 30 seconds
    use_resource(move || async move {
        loop {
            if let Ok(c) = fetch_notification_count().await {
                count.set(c);
            }
            #[cfg(feature = "web")]
            {
                gloo_timers::future::TimeoutFuture::new(30_000).await;
            }
            #[cfg(not(feature = "web"))]
            {
                tokio::time::sleep(std::time::Duration::from_secs(30)).await;
            }
        }
    });

    // Load notifications when panel opens
    let load_notifications = move |_| {
        is_open.set(!is_open());
        if !is_open() {
            return;
        }
        is_loading.set(true);
        spawn(async move {
            if let Ok(resp) = fetch_notifications().await {
                notifications.set(resp.data);
            }
            // Mark all as read when panel opens
            let _ = mark_all_notifications_read().await;
            count.set(0);
            is_loading.set(false);
        });
    };

    let on_dismiss = move |id: String| {
        spawn(async move {
            let _ = dismiss_notification(id.clone()).await;
            notifications.write().retain(|n| {
                n.id.as_ref()
                    .and_then(|v| v.get("$oid"))
                    .and_then(|v| v.as_str())
                    != Some(&id)
            });
        });
    };

    rsx! {
        div { class: "notification-bell-wrapper",
            // Bell button
            button {
                class: "notification-bell-btn",
                onclick: load_notifications,
                title: "CVE Alerts",
                Icon { icon: BsBell, width: 18, height: 18 }
                if count() > 0 {
                    span { class: "notification-badge", "{count()}" }
                }
            }

            // Dropdown panel
            if is_open() {
                div { class: "notification-panel",
                    div { class: "notification-panel-header",
                        span { "CVE Alerts" }
                        button {
                            class: "notification-close-btn",
                            onclick: move |_| is_open.set(false),
                            Icon { icon: BsX, width: 16, height: 16 }
                        }
                    }
                    div { class: "notification-panel-body",
                        if is_loading() {
                            div { class: "notification-loading", "Loading..." }
                        } else if notifications().is_empty() {
                            div { class: "notification-empty",
                                Icon { icon: BsShieldCheck, width: 32, height: 32 }
                                p { "No CVE alerts" }
                            }
                        } else {
                            for notif in notifications().iter() {
                                {
                                    let id = notif.id.as_ref()
                                        .and_then(|v| v.get("$oid"))
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("")
                                        .to_string();
                                    let sev_class = match notif.severity.as_str() {
                                        "critical" => "sev-critical",
                                        "high" => "sev-high",
                                        "medium" => "sev-medium",
                                        _ => "sev-low",
                                    };
                                    let dismiss_id = id.clone();
                                    rsx! {
                                        div { class: "notification-item",
                                            div { class: "notification-item-header",
                                                span { class: "notification-sev {sev_class}",
                                                    "{notif.severity.to_uppercase()}"
                                                }
                                                span { class: "notification-cve-id",
                                                    if let Some(ref url) = notif.url {
                                                        a { href: "{url}", target: "_blank", "{notif.cve_id}" }
                                                    } else {
                                                        "{notif.cve_id}"
                                                    }
                                                }
                                                if let Some(score) = notif.cvss_score {
                                                    span { class: "notification-cvss", "CVSS {score:.1}" }
                                                }
                                                button {
                                                    class: "notification-dismiss-btn",
                                                    title: "Dismiss",
                                                    onclick: move |_| on_dismiss(dismiss_id.clone()),
                                                    Icon { icon: BsXCircle, width: 14, height: 14 }
                                                }
                                            }
                                            div { class: "notification-item-pkg",
                                                "{notif.package_name} {notif.package_version}"
                                            }
                                            div { class: "notification-item-repo",
                                                "{notif.repo_name}"
                                            }
                                            if let Some(ref summary) = notif.summary {
                                                div { class: "notification-item-summary",
                                                    "{summary}"
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
@@ -8,7 +8,6 @@ pub mod graph;
pub mod help_chat;
pub mod issues;
pub mod mcp;
pub mod notifications;
pub mod pentest;
#[allow(clippy::too_many_arguments)]
pub mod repositories;

@@ -1,91 +0,0 @@
use dioxus::prelude::*;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NotificationListResponse {
    pub data: Vec<CveNotificationData>,
    #[serde(default)]
    pub total: Option<u64>,
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct CveNotificationData {
    #[serde(rename = "_id")]
    pub id: Option<serde_json::Value>,
    pub cve_id: String,
    pub repo_name: String,
    pub package_name: String,
    pub package_version: String,
    pub severity: String,
    pub cvss_score: Option<f64>,
    pub summary: Option<String>,
    pub url: Option<String>,
    pub status: String,
    #[serde(default)]
    pub created_at: Option<serde_json::Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NotificationCountResponse {
    pub count: u64,
}

#[server]
pub async fn fetch_notification_count() -> Result<u64, ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;

    let url = format!("{}/api/v1/notifications/count", state.agent_api_url);
    let resp = reqwest::get(&url)
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    let body: NotificationCountResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(body.count)
}

#[server]
pub async fn fetch_notifications() -> Result<NotificationListResponse, ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;

    let url = format!("{}/api/v1/notifications?limit=20", state.agent_api_url);
    let resp = reqwest::get(&url)
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    let body: NotificationListResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(body)
}

#[server]
pub async fn mark_all_notifications_read() -> Result<(), ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;

    let url = format!("{}/api/v1/notifications/read-all", state.agent_api_url);
    reqwest::Client::new()
        .post(&url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(())
}

#[server]
pub async fn dismiss_notification(id: String) -> Result<(), ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;

    let url = format!("{}/api/v1/notifications/{id}/dismiss", state.agent_api_url);
    reqwest::Client::new()
        .patch(&url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(())
}
@@ -259,10 +259,7 @@ pub fn McpServersPage() -> Element {
div { class: "mcp-detail-row",
    Icon { icon: BsGlobe, width: 13, height: 13 }
    span { class: "mcp-detail-label", "Endpoint" }
    div { class: "copyable",
        code { class: "mcp-detail-value", "{server.endpoint_url}" }
        crate::components::copy_button::CopyButton { value: server.endpoint_url.clone(), small: true }
    }
    code { class: "mcp-detail-value", "{server.endpoint_url}" }
}
div { class: "mcp-detail-row",
    Icon { icon: BsHddNetwork, width: 13, height: 13 }

@@ -137,18 +137,11 @@ pub fn RepositoriesPage() -> Element {
"For SSH URLs: add this deploy key (read-only) to your repository"
}
div {
    class: "copyable",
    style: "margin-top: 4px; padding: 8px; background: var(--bg-secondary); border-radius: 4px;",
    code {
        style: "font-size: 11px; word-break: break-all; user-select: all;",
        if ssh_public_key().is_empty() {
            "Loading..."
        } else {
            "{ssh_public_key}"
        }
    }
    if !ssh_public_key().is_empty() {
        crate::components::copy_button::CopyButton { value: ssh_public_key(), small: true }
    style: "margin-top: 4px; padding: 8px; background: var(--bg-secondary); border-radius: 4px; font-family: monospace; font-size: 11px; word-break: break-all; user-select: all;",
    if ssh_public_key().is_empty() {
        "Loading..."
    } else {
        "{ssh_public_key}"
    }
}
}
@@ -397,37 +390,28 @@ pub fn RepositoriesPage() -> Element {
}
div { class: "form-group",
    label { "Webhook URL" }
    {
        #[cfg(feature = "web")]
        let origin = web_sys::window()
            .and_then(|w: web_sys::Window| w.location().origin().ok())
            .unwrap_or_default();
        #[cfg(not(feature = "web"))]
        let origin = String::new();
        let webhook_url = format!("{origin}/webhook/{}/{eid}", edit_webhook_tracker());
        rsx! {
            div { class: "copyable",
                input {
                    r#type: "text",
                    readonly: true,
                    style: "font-family: monospace; font-size: 12px; flex: 1;",
                    value: "{webhook_url}",
                }
                crate::components::copy_button::CopyButton { value: webhook_url.clone() }
            }
        }
    input {
        r#type: "text",
        readonly: true,
        style: "font-family: monospace; font-size: 12px;",
        value: {
            #[cfg(feature = "web")]
            let origin = web_sys::window()
                .and_then(|w: web_sys::Window| w.location().origin().ok())
                .unwrap_or_default();
            #[cfg(not(feature = "web"))]
            let origin = String::new();
            format!("{origin}/webhook/{}/{eid}", edit_webhook_tracker())
        },
    }
}
div { class: "form-group",
    label { "Webhook Secret" }
    div { class: "copyable",
        input {
            r#type: "text",
            readonly: true,
            style: "font-family: monospace; font-size: 12px; flex: 1;",
            value: "{secret}",
        }
        crate::components::copy_button::CopyButton { value: secret.clone() }
    input {
        r#type: "text",
        readonly: true,
        style: "font-family: monospace; font-size: 12px;",
        value: "{secret}",
    }
}
}

@@ -15,30 +15,6 @@ use crate::parsers::registry::ParserRegistry;
use super::community::detect_communities;
use super::impact::ImpactAnalyzer;

/// Walk up the qualified-name hierarchy to find the closest ancestor
/// that exists in the node map.
///
/// For `"src/main.rs::config::load"` this tries:
/// 1. `"src/main.rs::config"` (trim last `::` segment)
/// 2. `"src/main.rs"` (trim again)
///
/// Returns the first match found, or `None` if the node is a root.
fn find_parent_qname(qname: &str, node_map: &HashMap<String, NodeIndex>) -> Option<String> {
    let mut current = qname.to_string();
    loop {
        // Try stripping the last "::" segment
        if let Some(pos) = current.rfind("::") {
            current.truncate(pos);
            if node_map.contains_key(&current) {
                return Some(current);
            }
            continue;
        }
        // No more "::" — this is a top-level node (file), no parent
        return None;
    }
}

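A tiny sketch of the walk described in the doc comment above, written inside this module (it mirrors the unit test further down; the map contents are illustrative):

let mut node_map: HashMap<String, NodeIndex> = HashMap::new();
node_map.insert("src/main.rs".into(), NodeIndex::new(0));
node_map.insert("src/main.rs::config".into(), NodeIndex::new(1));
assert_eq!(
    find_parent_qname("src/main.rs::config::load", &node_map),
    Some("src/main.rs::config".to_string()),
);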
/// The main graph engine that builds and manages code knowledge graphs
pub struct GraphEngine {
    parser_registry: ParserRegistry,
@@ -113,12 +89,7 @@ impl GraphEngine {
        Ok((code_graph, build_run))
    }

    /// Build petgraph from parsed output, resolving edges to node indices.
    ///
    /// After resolving the explicit edges from parsers, we synthesise
    /// `Contains` edges so that every node is reachable from its parent
    /// file or module. This eliminates disconnected "islands" that
    /// otherwise appear when files share no direct call/import edges.
    /// Build petgraph from parsed output, resolving edges to node indices
    fn build_petgraph(&self, parse_output: ParseOutput) -> Result<CodeGraph, CoreError> {
        let mut graph = DiGraph::new();
        let mut node_map: HashMap<String, NodeIndex> = HashMap::new();
@@ -131,13 +102,15 @@ impl GraphEngine {
            node_map.insert(node.qualified_name.clone(), idx);
        }

        // Resolve and add explicit edges from parsers
        // Resolve and add edges — rewrite target to the resolved qualified name
        // so the persisted edge references match node qualified_names.
        let mut resolved_edges = Vec::new();
        for mut edge in parse_output.edges {
            let source_idx = node_map.get(&edge.source);
            let resolved = self.resolve_edge_target(&edge.target, &node_map);

            if let (Some(&src), Some(tgt)) = (source_idx, resolved) {
                // Update target to the resolved qualified name
                let resolved_name = node_map
                    .iter()
                    .find(|(_, &idx)| idx == tgt)
@@ -148,48 +121,7 @@ impl GraphEngine {
                graph.add_edge(src, tgt, edge.kind.clone());
                resolved_edges.push(edge);
            }
        }

        // Synthesise Contains edges: connect each node to its closest
        // parent in the qualified-name hierarchy.
        //
        // For "src/main.rs::config::load", the parent chain is:
        //   "src/main.rs::config" → "src/main.rs"
        //
        // We walk up the qualified name (splitting on "::") and link to
        // the first ancestor that exists in the node map.
        let repo_id = nodes.first().map(|n| n.repo_id.as_str()).unwrap_or("");
        let build_id = nodes
            .first()
            .map(|n| n.graph_build_id.as_str())
            .unwrap_or("");

        let qualified_names: Vec<String> = nodes.iter().map(|n| n.qualified_name.clone()).collect();
        let file_paths: HashMap<String, String> = nodes
            .iter()
            .map(|n| (n.qualified_name.clone(), n.file_path.clone()))
            .collect();

        for qname in &qualified_names {
            if let Some(parent_qname) = find_parent_qname(qname, &node_map) {
                let child_idx = node_map[qname];
                let parent_idx = node_map[&parent_qname];

                // Avoid duplicate edges
                if !graph.contains_edge(parent_idx, child_idx) {
                    graph.add_edge(parent_idx, child_idx, CodeEdgeKind::Contains);
                    resolved_edges.push(CodeEdge {
                        id: None,
                        repo_id: repo_id.to_string(),
                        graph_build_id: build_id.to_string(),
                        source: parent_qname,
                        target: qname.clone(),
                        kind: CodeEdgeKind::Contains,
                        file_path: file_paths.get(qname).cloned().unwrap_or_default(),
                        line_number: None,
                    });
                }
            }
            // Skip unresolved edges (cross-file, external deps) — conservative approach
        }

        Ok(CodeGraph {
@@ -200,62 +132,33 @@ impl GraphEngine {
        })
    }

    /// Try to resolve an edge target to a known node.
    ///
    /// Resolution strategies (in order):
    /// 1. Direct qualified-name match
    /// 2. Suffix match: "foo" matches "src/main.rs::mod::foo"
    /// 3. Module-path match: "config::load" matches "src/config.rs::load"
    /// 4. Self-method: "self.method" matches "::method"
    /// Try to resolve an edge target to a known node
    fn resolve_edge_target(
        &self,
        target: &str,
        node_map: &HashMap<String, NodeIndex>,
    ) -> Option<NodeIndex> {
        // 1. Direct match
        // Direct match
        if let Some(idx) = node_map.get(target) {
            return Some(*idx);
        }

        // 2. Suffix match: "foo" → "path/file.rs::foo"
        let suffix_pattern = format!("::{target}");
        let dot_pattern = format!(".{target}");
        // Try matching just the function/type name (intra-file resolution)
        for (qualified, idx) in node_map {
            if qualified.ends_with(&suffix_pattern) || qualified.ends_with(&dot_pattern) {
            // Match "foo" to "path/file.rs::foo" or "path/file.rs::Type::foo"
            if qualified.ends_with(&format!("::{target}"))
                || qualified.ends_with(&format!(".{target}"))
            {
                return Some(*idx);
            }
        }

        // 3. Module-path match: "config::load" → try matching the last N
        //    segments of the target against node qualified names.
        //    This handles cross-file calls like `crate::config::load` or
        //    `super::handlers::process` where the prefix differs.
        if target.contains("::") {
            // Strip common Rust path prefixes
            let stripped = target
                .strip_prefix("crate::")
                .or_else(|| target.strip_prefix("super::"))
                .or_else(|| target.strip_prefix("self::"))
                .unwrap_or(target);

            let segments: Vec<&str> = stripped.split("::").collect();
            // Try matching progressively shorter suffixes
            for start in 0..segments.len() {
                let suffix = segments[start..].join("::");
                let pattern = format!("::{suffix}");
                for (qualified, idx) in node_map {
                    if qualified.ends_with(&pattern) {
                        return Some(*idx);
                    }
                }
            }
        }

        // 4. Self-method: "self.method" → "::method"
        // Try matching method calls like "self.method" -> look for "::method"
        if let Some(method_name) = target.strip_prefix("self.") {
            let pattern = format!("::{method_name}");
            for (qualified, idx) in node_map {
                if qualified.ends_with(&pattern) {
                if qualified.ends_with(&format!("::{method_name}"))
                    || qualified.ends_with(&format!(".{method_name}"))
                {
                    return Some(*idx);
                }
            }
@@ -450,83 +353,4 @@ mod tests {
        assert!(code_graph.node_map.contains_key("a::c"));
        assert!(code_graph.node_map.contains_key("a::d"));
    }

    #[test]
    fn test_contains_edges_synthesised() {
        let engine = GraphEngine::new(1000);
        let mut output = ParseOutput::default();
        // File → Module → Function hierarchy
        output.nodes.push(make_node("src/main.rs"));
        output.nodes.push(make_node("src/main.rs::config"));
        output.nodes.push(make_node("src/main.rs::config::load"));

        let code_graph = engine.build_petgraph(output).unwrap();

        // Should have 2 Contains edges:
        //   src/main.rs → src/main.rs::config
        //   src/main.rs::config → src/main.rs::config::load
        let contains_edges: Vec<_> = code_graph
            .edges
            .iter()
            .filter(|e| matches!(e.kind, CodeEdgeKind::Contains))
            .collect();
        assert_eq!(contains_edges.len(), 2, "expected 2 Contains edges");

        let sources: Vec<&str> = contains_edges.iter().map(|e| e.source.as_str()).collect();
        assert!(sources.contains(&"src/main.rs"));
        assert!(sources.contains(&"src/main.rs::config"));
    }

    #[test]
    fn test_contains_edges_no_duplicates_with_existing_edges() {
        let engine = GraphEngine::new(1000);
        let mut output = ParseOutput::default();
        output.nodes.push(make_node("src/main.rs"));
        output.nodes.push(make_node("src/main.rs::foo"));

        // Explicit Calls edge (foo calls itself? just for testing)
        output.edges.push(CodeEdge {
            id: None,
            repo_id: "test".to_string(),
            graph_build_id: "build1".to_string(),
            source: "src/main.rs::foo".to_string(),
            target: "src/main.rs::foo".to_string(),
            kind: CodeEdgeKind::Calls,
            file_path: "src/main.rs".to_string(),
            line_number: Some(1),
        });

        let code_graph = engine.build_petgraph(output).unwrap();

        // 1 Calls + 1 Contains = 2 edges total
        assert_eq!(code_graph.edges.len(), 2);
    }

    #[test]
    fn test_cross_file_resolution_with_module_path() {
        let engine = GraphEngine::new(1000);
        let node_map = build_test_node_map(&["src/config.rs::load_config", "src/main.rs::main"]);
        // "crate::config::load_config" should resolve to "src/config.rs::load_config"
        let result = engine.resolve_edge_target("crate::config::load_config", &node_map);
        assert!(result.is_some(), "cross-file crate:: path should resolve");
    }

    #[test]
    fn test_find_parent_qname() {
        let node_map = build_test_node_map(&[
            "src/main.rs",
            "src/main.rs::config",
            "src/main.rs::config::load",
        ]);

        assert_eq!(
            find_parent_qname("src/main.rs::config::load", &node_map),
            Some("src/main.rs::config".to_string())
        );
        assert_eq!(
            find_parent_qname("src/main.rs::config", &node_map),
            Some("src/main.rs".to_string())
        );
        assert_eq!(find_parent_qname("src/main.rs", &node_map), None);
    }
}
