3 Commits

Author SHA1 Message Date
Sharang Parnerkar
b02202fbc8 feat: hourly CVE alerting with notification bell and API
All checks were successful
CI / Check (pull_request) Successful in 9m47s
CI / Detect Changes (pull_request) Has been skipped
CI / Deploy Agent (pull_request) Has been skipped
CI / Deploy Dashboard (pull_request) Has been skipped
CI / Deploy Docs (pull_request) Has been skipped
CI / Deploy MCP (pull_request) Has been skipped
Implements the full CVE alerting pipeline:

CVE Monitor (scheduler.rs):
- Replaces stub monitor_cves with actual OSV.dev scanning of all SBOM entries
- Runs hourly by default (CVE_MONITOR_SCHEDULE, was daily)
- Creates CveNotification for each new CVE (deduped by cve_id+repo+package)
- Updates SBOM entries with discovered vulnerabilities
- Upserts CveAlert records

Notification Model (compliance-core/models/notification.rs):
- CveNotification with status lifecycle: new → read → dismissed
- NotificationSeverity (Low/Medium/High/Critical) from CVSS scores
- parse_severity helper for OSV/NVD severity mapping

API Endpoints (5 new routes):
- GET /api/v1/notifications — List with status/severity/repo filters
- GET /api/v1/notifications/count — Unread count (for badge)
- PATCH /api/v1/notifications/:id/read — Mark as read
- PATCH /api/v1/notifications/:id/dismiss — Dismiss
- POST /api/v1/notifications/read-all — Bulk mark read

Dashboard Notification Bell:
- Floating bell icon (top-right) with unread count badge
- Dropdown panel showing CVE details: severity, CVSS, package, repo, summary
- Dismiss individual notifications
- Auto-marks as read when panel opens
- Polls count every 30 seconds

Also:
- Fix Dockerfile.dashboard: revert to dioxus-cli 0.7.3 --locked
- Add cve_notifications collection with unique + status indexes
- MongoDB indexes for efficient notification queries

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-30 12:32:58 +02:00
4388e98b5b feat: add E2E test suite with nightly CI, fix dashboard Dockerfile (#52)
All checks were successful
CI / Check (push) Has been skipped
CI / Detect Changes (push) Successful in 2s
CI / Deploy Agent (push) Successful in 2s
CI / Deploy Dashboard (push) Successful in 2s
CI / Deploy Docs (push) Has been skipped
CI / Deploy MCP (push) Has been skipped
2026-03-30 10:04:07 +00:00
a8bb05d7b1 feat: add floating help chat widget, remove settings page (#51)
All checks were successful
CI / Check (push) Has been skipped
CI / Detect Changes (push) Successful in 3s
CI / Deploy Agent (push) Successful in 3s
CI / Deploy Dashboard (push) Successful in 2s
CI / Deploy Docs (push) Successful in 2s
CI / Deploy MCP (push) Has been skipped
2026-03-30 08:05:29 +00:00
32 changed files with 1877 additions and 46 deletions

View File

@@ -70,7 +70,7 @@ jobs:
# Tests (reuses compilation artifacts from clippy)
- name: Tests (core + agent)
run: cargo test -p compliance-core -p compliance-agent
run: cargo test -p compliance-core -p compliance-agent --lib
- name: Tests (dashboard server)
run: cargo test -p compliance-dashboard --features server --no-default-features
- name: Tests (dashboard web)

View File

@@ -0,0 +1,52 @@
name: Nightly E2E Tests

on:
  schedule:
    - cron: '0 3 * * *' # 3 AM UTC daily
  workflow_dispatch: # Allow manual trigger

env:
  CARGO_TERM_COLOR: always
  RUSTFLAGS: "-D warnings"
  RUSTC_WRAPPER: /usr/local/bin/sccache
  SCCACHE_DIR: /tmp/sccache
  TEST_MONGODB_URI: "mongodb://root:example@mongo:27017/?authSource=admin"

# At most one nightly run at a time; a newer trigger cancels the older run.
concurrency:
  group: nightly-e2e
  cancel-in-progress: true

jobs:
  e2e:
    name: E2E Tests
    runs-on: docker
    container:
      image: rust:1.94-bookworm
    services:
      mongo:
        image: mongo:7
        env:
          MONGO_INITDB_ROOT_USERNAME: root
          MONGO_INITDB_ROOT_PASSWORD: example
    steps:
      - name: Checkout
        # Manual shallow checkout (no actions/checkout in this environment).
        run: |
          git init
          git remote add origin "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git"
          git fetch --depth=1 origin "${GITHUB_SHA:-refs/heads/main}"
          git checkout FETCH_HEAD
      - name: Install sccache
        run: |
          curl -fsSL https://github.com/mozilla/sccache/releases/download/v0.9.1/sccache-v0.9.1-x86_64-unknown-linux-musl.tar.gz \
            | tar xz --strip-components=1 -C /usr/local/bin/ sccache-v0.9.1-x86_64-unknown-linux-musl/sccache
          chmod +x /usr/local/bin/sccache
        env:
          # sccache is not installed yet, so disable the wrapper for this step.
          RUSTC_WRAPPER: ""
      - name: Run E2E tests
        run: cargo test -p compliance-agent --test e2e -- --test-threads=4
      - name: Show sccache stats
        run: sccache --show-stats
        if: always()

View File

@@ -1,6 +1,6 @@
FROM rust:1.94-bookworm AS builder
RUN cargo install dioxus-cli --version 0.7.3
RUN cargo install dioxus-cli --version 0.7.3 --locked
ARG DOCS_URL=/docs

View File

@@ -42,3 +42,14 @@ tokio-tungstenite = { version = "0.26", features = ["rustls-tls-webpki-roots"] }
futures-core = "0.3"
dashmap = { workspace = true }
tokio-stream = { workspace = true }
[dev-dependencies]
compliance-core = { workspace = true, features = ["mongodb"] }
reqwest = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
mongodb = { workspace = true }
uuid = { workspace = true }
secrecy = { workspace = true }
axum = "0.8"
tower-http = { version = "0.6", features = ["cors"] }

View File

@@ -6,6 +6,7 @@ pub mod graph;
pub mod health;
pub mod help_chat;
pub mod issues;
pub mod notifications;
pub mod pentest_handlers;
pub use pentest_handlers as pentest;
pub mod repos;

View File

@@ -0,0 +1,178 @@
use axum::extract::Extension;
use axum::http::StatusCode;
use axum::Json;
use mongodb::bson::doc;
use serde::Deserialize;
use compliance_core::models::notification::CveNotification;
use super::dto::{AgentExt, ApiResponse};
/// GET /api/v1/notifications — List CVE notifications (newest first).
///
/// Query params (all optional): `status` ("all", or an exact status value;
/// absent means new + read, hiding dismissed), `severity`, `repo_id`,
/// `page` (1-based, default 1), `limit` (default 50, clamped to 1..=200).
#[tracing::instrument(skip_all)]
pub async fn list_notifications(
    Extension(agent): AgentExt,
    axum::extract::Query(params): axum::extract::Query<NotificationFilter>,
) -> Result<Json<ApiResponse<Vec<CveNotification>>>, StatusCode> {
    let mut filter = doc! {};
    // Filter by status (default: show new + read, exclude dismissed)
    match params.status.as_deref() {
        Some("all") => {}
        Some(s) => {
            filter.insert("status", s);
        }
        None => {
            filter.insert("status", doc! { "$in": ["new", "read"] });
        }
    }
    // Filter by severity
    if let Some(ref sev) = params.severity {
        filter.insert("severity", sev.as_str());
    }
    // Filter by repo
    if let Some(ref repo_id) = params.repo_id {
        filter.insert("repo_id", repo_id.as_str());
    }
    let page = params.page.unwrap_or(1).max(1);
    // Clamp to 1..=200: with only `.min(200)` a zero or negative limit would
    // reach the driver, and `limit as u64` below would wrap a negative value
    // into an enormous skip offset.
    let limit = params.limit.unwrap_or(50).clamp(1, 200);
    let skip = (page - 1) * limit as u64;
    // Count with the same filter so the pagination total matches the listing.
    let total = agent
        .db
        .cve_notifications()
        .count_documents(filter.clone())
        .await
        .unwrap_or(0);
    let notifications: Vec<CveNotification> = match agent
        .db
        .cve_notifications()
        .find(filter)
        .sort(doc! { "created_at": -1 })
        .skip(skip)
        .limit(limit)
        .await
    {
        Ok(cursor) => {
            use futures_util::StreamExt;
            let mut items = Vec::new();
            let mut cursor = cursor;
            // Documents that fail to deserialize are skipped rather than
            // failing the whole listing.
            while let Some(Ok(n)) = cursor.next().await {
                items.push(n);
            }
            items
        }
        Err(e) => {
            tracing::error!("Failed to list notifications: {e}");
            return Err(StatusCode::INTERNAL_SERVER_ERROR);
        }
    };
    Ok(Json(ApiResponse {
        data: notifications,
        total: Some(total),
        page: Some(page),
    }))
}
/// GET /api/v1/notifications/count — Count of unread notifications.
///
/// Returns `{ "count": n }` where n is the number of documents still in the
/// "new" state; a database error is reported as zero rather than a 500.
#[tracing::instrument(skip_all)]
pub async fn notification_count(
    Extension(agent): AgentExt,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let unread = match agent
        .db
        .cve_notifications()
        .count_documents(doc! { "status": "new" })
        .await
    {
        Ok(n) => n,
        Err(_) => 0,
    };
    Ok(Json(serde_json::json!({ "count": unread })))
}
/// PATCH /api/v1/notifications/:id/read — Mark a notification as read.
///
/// 400 for a malformed id, 404 if no document matches, 500 on DB errors.
#[tracing::instrument(skip_all, fields(id = %id))]
pub async fn mark_read(
    Extension(agent): AgentExt,
    axum::extract::Path(id): axum::extract::Path<String>,
) -> Result<Json<serde_json::Value>, StatusCode> {
    // Reject ids that are not valid ObjectIds before touching the database.
    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
    let change = doc! { "$set": {
        "status": "read",
        "read_at": mongodb::bson::DateTime::now(),
    }};
    let outcome = agent
        .db
        .cve_notifications()
        .update_one(doc! { "_id": oid }, change)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    match outcome.matched_count {
        0 => Err(StatusCode::NOT_FOUND),
        _ => Ok(Json(serde_json::json!({ "status": "read" }))),
    }
}
/// PATCH /api/v1/notifications/:id/dismiss — Dismiss a notification.
///
/// 400 for a malformed id, 404 if no document matches, 500 on DB errors.
#[tracing::instrument(skip_all, fields(id = %id))]
pub async fn dismiss_notification(
    Extension(agent): AgentExt,
    axum::extract::Path(id): axum::extract::Path<String>,
) -> Result<Json<serde_json::Value>, StatusCode> {
    // Validate the path parameter up front.
    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
    let outcome = agent
        .db
        .cve_notifications()
        .update_one(
            doc! { "_id": oid },
            doc! { "$set": { "status": "dismissed" } },
        )
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    match outcome.matched_count {
        0 => Err(StatusCode::NOT_FOUND),
        _ => Ok(Json(serde_json::json!({ "status": "dismissed" }))),
    }
}
/// POST /api/v1/notifications/read-all — Mark all new notifications as read.
///
/// Responds with `{ "updated": n }`, the number of documents modified.
#[tracing::instrument(skip_all)]
pub async fn mark_all_read(
    Extension(agent): AgentExt,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let change = doc! { "$set": {
        "status": "read",
        "read_at": mongodb::bson::DateTime::now(),
    }};
    let outcome = agent
        .db
        .cve_notifications()
        .update_many(doc! { "status": "new" }, change)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    Ok(Json(
        serde_json::json!({ "updated": outcome.modified_count }),
    ))
}
/// Query-string parameters accepted by `list_notifications`.
#[derive(Debug, Deserialize)]
pub struct NotificationFilter {
// "all" disables the status filter; any other value matches exactly;
// absent defaults to new + read (dismissed hidden).
pub status: Option<String>,
// Exact severity match — assumes lowercase values ("low".."critical");
// TODO confirm against NotificationSeverity serialization.
pub severity: Option<String>,
// Restrict results to a single repository id.
pub repo_id: Option<String>,
// 1-based page number; defaults to 1.
pub page: Option<u64>,
// Page size; defaults to 50, capped at 200.
pub limit: Option<i64>,
}

View File

@@ -101,6 +101,27 @@ pub fn build_router() -> Router {
)
// Help chat (documentation-grounded Q&A)
.route("/api/v1/help/chat", post(handlers::help_chat::help_chat))
// CVE notification endpoints
.route(
"/api/v1/notifications",
get(handlers::notifications::list_notifications),
)
.route(
"/api/v1/notifications/count",
get(handlers::notifications::notification_count),
)
.route(
"/api/v1/notifications/read-all",
post(handlers::notifications::mark_all_read),
)
.route(
"/api/v1/notifications/{id}/read",
patch(handlers::notifications::mark_read),
)
.route(
"/api/v1/notifications/{id}/dismiss",
patch(handlers::notifications::dismiss_notification),
)
// Pentest API endpoints
.route(
"/api/v1/pentest/lookup-repo",

View File

@@ -42,7 +42,7 @@ pub fn load_config() -> Result<AgentConfig, AgentError> {
.unwrap_or(3001),
scan_schedule: env_var_opt("SCAN_SCHEDULE").unwrap_or_else(|| "0 0 */6 * * *".to_string()),
cve_monitor_schedule: env_var_opt("CVE_MONITOR_SCHEDULE")
.unwrap_or_else(|| "0 0 0 * * *".to_string()),
.unwrap_or_else(|| "0 0 * * * *".to_string()),
git_clone_base_path: env_var_opt("GIT_CLONE_BASE_PATH")
.unwrap_or_else(|| "/tmp/compliance-scanner/repos".to_string()),
ssh_key_path: env_var_opt("SSH_KEY_PATH")

View File

@@ -78,6 +78,25 @@ impl Database {
)
.await?;
// cve_notifications: unique cve_id + repo_id + package, status filter
self.cve_notifications()
.create_index(
IndexModel::builder()
.keys(
doc! { "cve_id": 1, "repo_id": 1, "package_name": 1, "package_version": 1 },
)
.options(IndexOptions::builder().unique(true).build())
.build(),
)
.await?;
self.cve_notifications()
.create_index(
IndexModel::builder()
.keys(doc! { "status": 1, "created_at": -1 })
.build(),
)
.await?;
// tracker_issues: unique finding_id
self.tracker_issues()
.create_index(
@@ -222,6 +241,12 @@ impl Database {
self.inner.collection("cve_alerts")
}
/// Typed handle to the `cve_notifications` collection.
pub fn cve_notifications(
&self,
) -> Collection<compliance_core::models::notification::CveNotification> {
self.inner.collection("cve_notifications")
}
pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
self.inner.collection("tracker_issues")
}

View File

@@ -0,0 +1,16 @@
// Library entrypoint — re-exports for integration tests and the binary.
pub mod agent;
pub mod api;
pub mod config;
pub mod database;
pub mod error;
pub mod llm;
pub mod pentest;
pub mod pipeline;
pub mod rag;
pub mod scheduler;
pub mod ssh;
#[allow(dead_code)]
pub mod trackers;
pub mod webhooks;

View File

@@ -1,17 +1,4 @@
mod agent;
mod api;
pub(crate) mod config;
mod database;
mod error;
mod llm;
mod pentest;
mod pipeline;
mod rag;
mod scheduler;
mod ssh;
#[allow(dead_code)]
mod trackers;
mod webhooks;
use compliance_agent::{agent, api, config, database, scheduler, ssh, webhooks};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {

View File

@@ -33,6 +33,7 @@ struct PatternRule {
file_extensions: Vec<String>,
}
#[allow(clippy::new_without_default)]
impl GdprPatternScanner {
pub fn new() -> Self {
let patterns = vec![
@@ -98,6 +99,7 @@ impl Scanner for GdprPatternScanner {
}
}
#[allow(clippy::new_without_default)]
impl OAuthPatternScanner {
pub fn new() -> Self {
let patterns = vec![

View File

@@ -82,24 +82,158 @@ async fn scan_all_repos(agent: &ComplianceAgent) {
}
/// Hourly CVE monitor job.
///
/// Re-scans every stored SBOM entry against OSV.dev via the pipeline's
/// `CveScanner`, upserts `CveAlert` records, writes discovered
/// vulnerabilities back onto the SBOM entries, and creates a
/// `CveNotification` for each (cve_id, repo, package, version) combination
/// not seen before. (The rendered diff had interleaved pre-change lines;
/// this is the post-change function only.)
async fn monitor_cves(agent: &ComplianceAgent) {
    use compliance_core::models::notification::{parse_severity, CveNotification};
    use compliance_core::models::SbomEntry;
    use futures_util::StreamExt;
    // Fetch all SBOM entries grouped by repo; documents that fail to
    // deserialize are silently skipped.
    let cursor = match agent.db.sbom_entries().find(doc! {}).await {
        Ok(c) => c,
        Err(e) => {
            tracing::error!("CVE monitor: failed to list SBOM entries: {e}");
            return;
        }
    };
    let entries: Vec<SbomEntry> = cursor.filter_map(|r| async { r.ok() }).collect().await;
    if entries.is_empty() {
        tracing::debug!("CVE monitor: no SBOM entries, skipping");
        return;
    }
    tracing::info!(
        "CVE monitor: checking {} dependencies for new CVEs",
        entries.len()
    );
    // Build a repo_id → repo_name lookup so notifications carry a
    // human-readable repository name.
    let repo_ids: std::collections::HashSet<String> =
        entries.iter().map(|e| e.repo_id.clone()).collect();
    let mut repo_names: std::collections::HashMap<String, String> =
        std::collections::HashMap::new();
    for rid in &repo_ids {
        if let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(rid) {
            if let Ok(Some(repo)) = agent.db.repositories().find_one(doc! { "_id": oid }).await {
                repo_names.insert(rid.clone(), repo.name.clone());
            }
        }
    }
    // Use the existing CveScanner to query OSV.dev (NVD key is optional).
    let nvd_key = agent.config.nvd_api_key.as_ref().map(|k| {
        use secrecy::ExposeSecret;
        k.expose_secret().to_string()
    });
    let scanner = crate::pipeline::cve::CveScanner::new(
        agent.http.clone(),
        agent.config.searxng_url.clone(),
        nvd_key,
    );
    // Group entries by repo for scanning.
    let mut entries_by_repo: std::collections::HashMap<String, Vec<SbomEntry>> =
        std::collections::HashMap::new();
    for entry in entries {
        entries_by_repo
            .entry(entry.repo_id.clone())
            .or_default()
            .push(entry);
    }
    let mut new_notifications = 0u32;
    for (repo_id, mut repo_entries) in entries_by_repo {
        let repo_name = repo_names
            .get(&repo_id)
            .cloned()
            .unwrap_or_else(|| repo_id.clone());
        // Scan dependencies for CVEs; a failed repo scan is logged and the
        // remaining repos are still processed.
        let alerts = match scanner.scan_dependencies(&repo_id, &mut repo_entries).await {
            Ok(a) => a,
            Err(e) => {
                tracing::warn!("CVE monitor: scan failed for {repo_name}: {e}");
                continue;
            }
        };
        // Upsert CVE alerts — $setOnInsert leaves pre-existing alerts intact.
        for alert in &alerts {
            let filter = doc! { "cve_id": &alert.cve_id, "repo_id": &alert.repo_id };
            let update = doc! { "$setOnInsert": mongodb::bson::to_bson(alert).unwrap_or_default() };
            let _ = agent
                .db
                .cve_alerts()
                .update_one(filter, update)
                .upsert(true)
                .await;
        }
        // Update SBOM entries with discovered vulnerabilities.
        for entry in &repo_entries {
            if entry.known_vulnerabilities.is_empty() {
                continue;
            }
            if let Some(entry_id) = &entry.id {
                let _ = agent
                    .db
                    .sbom_entries()
                    .update_one(
                        doc! { "_id": entry_id },
                        doc! { "$set": {
                            "known_vulnerabilities": mongodb::bson::to_bson(&entry.known_vulnerabilities).unwrap_or_default(),
                            "updated_at": mongodb::bson::DateTime::now(),
                        }},
                    )
                    .await;
            }
        }
        // Create notifications for NEW CVEs only — the upsert with
        // $setOnInsert is the dedup against existing notifications.
        for alert in &alerts {
            let filter = doc! {
                "cve_id": &alert.cve_id,
                "repo_id": &alert.repo_id,
                "package_name": &alert.affected_package,
                "package_version": &alert.affected_version,
            };
            let severity = parse_severity(alert.severity.as_deref(), alert.cvss_score);
            let mut notification = CveNotification::new(
                alert.cve_id.clone(),
                repo_id.clone(),
                repo_name.clone(),
                alert.affected_package.clone(),
                alert.affected_version.clone(),
                severity,
            );
            notification.cvss_score = alert.cvss_score;
            notification.summary = alert.summary.clone();
            notification.url = Some(format!("https://osv.dev/vulnerability/{}", alert.cve_id));
            let update = doc! {
                "$setOnInsert": mongodb::bson::to_bson(&notification).unwrap_or_default()
            };
            match agent
                .db
                .cve_notifications()
                .update_one(filter, update)
                .upsert(true)
                .await
            {
                Ok(result) if result.upserted_id.is_some() => {
                    new_notifications += 1;
                }
                Err(e) => {
                    tracing::warn!("CVE monitor: failed to create notification: {e}");
                }
                _ => {} // Already exists
            }
        }
    }
    if new_notifications > 0 {
        tracing::info!("CVE monitor: created {new_notifications} new notification(s)");
    } else {
        tracing::info!("CVE monitor: no new CVEs found");
    }
}

View File

@@ -1,3 +1,165 @@
// Shared test harness for E2E / integration tests.
//
// Spins up the agent API server on a random port with an isolated test
// database. Each test gets a fresh database that is dropped on cleanup.
// (The rendered diff interleaved the old header comments; this is the
// post-change content only.)
use std::sync::Arc;
use compliance_agent::agent::ComplianceAgent;
use compliance_agent::api;
use compliance_agent::database::Database;
use compliance_core::AgentConfig;
use secrecy::SecretString;

/// A running test server with a unique database.
pub struct TestServer {
    pub base_url: String,
    pub client: reqwest::Client,
    db_name: String,
    mongodb_uri: String,
}

impl TestServer {
    /// Start an agent API server on a random port with an isolated database.
    ///
    /// Panics if MongoDB is unreachable — the E2E suite requires a running
    /// instance (configure via TEST_MONGODB_URI).
    pub async fn start() -> Self {
        let mongodb_uri = std::env::var("TEST_MONGODB_URI")
            .unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
        // Unique database name per test run to avoid collisions
        let db_name = format!("test_{}", uuid::Uuid::new_v4().simple());
        let db = Database::connect(&mongodb_uri, &db_name)
            .await
            .expect("Failed to connect to MongoDB — is it running?");
        db.ensure_indexes().await.expect("Failed to create indexes");
        // Minimal config: empty schedules (no background jobs) and every
        // optional integration disabled.
        let config = AgentConfig {
            mongodb_uri: mongodb_uri.clone(),
            mongodb_database: db_name.clone(),
            litellm_url: std::env::var("TEST_LITELLM_URL")
                .unwrap_or_else(|_| "http://localhost:4000".into()),
            litellm_api_key: SecretString::from(String::new()),
            litellm_model: "gpt-4o".into(),
            litellm_embed_model: "text-embedding-3-small".into(),
            agent_port: 0, // not used — we bind ourselves
            scan_schedule: String::new(),
            cve_monitor_schedule: String::new(),
            git_clone_base_path: "/tmp/compliance-scanner-tests/repos".into(),
            ssh_key_path: "/tmp/compliance-scanner-tests/ssh/id_ed25519".into(),
            github_token: None,
            github_webhook_secret: None,
            gitlab_url: None,
            gitlab_token: None,
            gitlab_webhook_secret: None,
            jira_url: None,
            jira_email: None,
            jira_api_token: None,
            jira_project_key: None,
            searxng_url: None,
            nvd_api_key: None,
            keycloak_url: None,
            keycloak_realm: None,
            keycloak_admin_username: None,
            keycloak_admin_password: None,
            pentest_verification_email: None,
            pentest_imap_host: None,
            pentest_imap_port: None,
            pentest_imap_tls: false,
            pentest_imap_username: None,
            pentest_imap_password: None,
        };
        let agent = ComplianceAgent::new(config, db);
        // Build the router with the agent extension
        let app = api::routes::build_router()
            .layer(axum::extract::Extension(Arc::new(agent)))
            .layer(tower_http::cors::CorsLayer::permissive());
        // Bind to port 0 to get a random available port
        let listener = tokio::net::TcpListener::bind("127.0.0.1:0")
            .await
            .expect("Failed to bind test server");
        let port = listener.local_addr().expect("no local addr").port();
        tokio::spawn(async move {
            axum::serve(listener, app).await.ok();
        });
        let base_url = format!("http://127.0.0.1:{port}");
        let client = reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(30))
            .build()
            .expect("Failed to build HTTP client");
        // Wait (up to ~2.5 s) for the server to start answering.
        for _ in 0..50 {
            if client
                .get(format!("{base_url}/api/v1/health"))
                .send()
                .await
                .is_ok()
            {
                break;
            }
            tokio::time::sleep(std::time::Duration::from_millis(50)).await;
        }
        Self {
            base_url,
            client,
            db_name,
            mongodb_uri,
        }
    }

    /// GET helper
    pub async fn get(&self, path: &str) -> reqwest::Response {
        self.client
            .get(format!("{}{path}", self.base_url))
            .send()
            .await
            .expect("GET request failed")
    }

    /// POST helper with JSON body
    pub async fn post(&self, path: &str, body: &serde_json::Value) -> reqwest::Response {
        self.client
            .post(format!("{}{path}", self.base_url))
            .json(body)
            .send()
            .await
            .expect("POST request failed")
    }

    /// PATCH helper with JSON body
    pub async fn patch(&self, path: &str, body: &serde_json::Value) -> reqwest::Response {
        self.client
            .patch(format!("{}{path}", self.base_url))
            .json(body)
            .send()
            .await
            .expect("PATCH request failed")
    }

    /// DELETE helper
    pub async fn delete(&self, path: &str) -> reqwest::Response {
        self.client
            .delete(format!("{}{path}", self.base_url))
            .send()
            .await
            .expect("DELETE request failed")
    }

    /// Get the unique database name for direct MongoDB access in tests.
    pub fn db_name(&self) -> &str {
        &self.db_name
    }

    /// Drop the test database on cleanup
    pub async fn cleanup(&self) {
        if let Ok(client) = mongodb::Client::with_uri_str(&self.mongodb_uri).await {
            client.database(&self.db_name).drop().await.ok();
        }
    }
}

View File

@@ -0,0 +1,7 @@
// E2E test entry point.
//
// Run with: cargo test -p compliance-agent --test e2e
// Requires: MongoDB running (set TEST_MONGODB_URI if not default)
mod common;
mod integration;

View File

@@ -0,0 +1,221 @@
use crate::common::TestServer;
use serde_json::json;
/// Insert a DAST target directly into MongoDB linked to a repo.
async fn insert_dast_target(server: &TestServer, repo_id: &str, name: &str) -> String {
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
let result = db
.collection::<mongodb::bson::Document>("dast_targets")
.insert_one(mongodb::bson::doc! {
"name": name,
"base_url": format!("https://{name}.example.com"),
"target_type": "webapp",
"repo_id": repo_id,
"rate_limit": 10,
"allow_destructive": false,
"created_at": mongodb::bson::DateTime::now(),
})
.await
.unwrap();
result.inserted_id.as_object_id().unwrap().to_hex()
}
/// Insert a pentest session linked to a target.
async fn insert_pentest_session(server: &TestServer, target_id: &str, repo_id: &str) -> String {
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
let result = db
.collection::<mongodb::bson::Document>("pentest_sessions")
.insert_one(mongodb::bson::doc! {
"target_id": target_id,
"repo_id": repo_id,
"strategy": "comprehensive",
"status": "completed",
"findings_count": 1_i32,
"exploitable_count": 0_i32,
"created_at": mongodb::bson::DateTime::now(),
})
.await
.unwrap();
result.inserted_id.as_object_id().unwrap().to_hex()
}
/// Insert an attack chain node linked to a session.
async fn insert_attack_node(server: &TestServer, session_id: &str) {
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
db.collection::<mongodb::bson::Document>("attack_chain_nodes")
.insert_one(mongodb::bson::doc! {
"session_id": session_id,
"node_id": "node-1",
"tool_name": "recon",
"status": "completed",
"created_at": mongodb::bson::DateTime::now(),
})
.await
.unwrap();
}
/// Insert a DAST finding linked to a target.
async fn insert_dast_finding(server: &TestServer, target_id: &str, session_id: &str) {
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
db.collection::<mongodb::bson::Document>("dast_findings")
.insert_one(mongodb::bson::doc! {
"scan_run_id": "run-1",
"target_id": target_id,
"vuln_type": "xss",
"title": "Reflected XSS",
"description": "XSS in search param",
"severity": "high",
"endpoint": "https://example.com/search",
"method": "GET",
"exploitable": true,
"evidence": [],
"session_id": session_id,
"created_at": mongodb::bson::DateTime::now(),
})
.await
.unwrap();
}
/// Helper to count documents in a collection
async fn count_docs(server: &TestServer, collection: &str) -> u64 {
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
db.collection::<mongodb::bson::Document>(collection)
.count_documents(mongodb::bson::doc! {})
.await
.unwrap()
}
/// E2E: deleting a repository must cascade-delete its DAST targets, pentest
/// sessions, attack chain nodes, and DAST findings.
#[tokio::test]
async fn delete_repo_cascades_to_dast_and_pentest_data() {
let server = TestServer::start().await;
// Create a repo
let resp = server
.post(
"/api/v1/repositories",
&json!({
"name": "cascade-test",
"git_url": "https://github.com/example/cascade-test.git",
}),
)
.await;
let body: serde_json::Value = resp.json().await.unwrap();
let repo_id = body["data"]["id"].as_str().unwrap().to_string();
// Insert DAST target linked to repo
let target_id = insert_dast_target(&server, &repo_id, "cascade-target").await;
// Insert pentest session linked to target
let session_id = insert_pentest_session(&server, &target_id, &repo_id).await;
// Insert downstream data
insert_attack_node(&server, &session_id).await;
insert_dast_finding(&server, &target_id, &session_id).await;
// Verify data exists
assert_eq!(count_docs(&server, "dast_targets").await, 1);
assert_eq!(count_docs(&server, "pentest_sessions").await, 1);
assert_eq!(count_docs(&server, "attack_chain_nodes").await, 1);
assert_eq!(count_docs(&server, "dast_findings").await, 1);
// Delete the repo
let resp = server
.delete(&format!("/api/v1/repositories/{repo_id}"))
.await;
assert_eq!(resp.status(), 200);
// All downstream data should be gone
assert_eq!(count_docs(&server, "dast_targets").await, 0);
assert_eq!(count_docs(&server, "pentest_sessions").await, 0);
assert_eq!(count_docs(&server, "attack_chain_nodes").await, 0);
assert_eq!(count_docs(&server, "dast_findings").await, 0);
server.cleanup().await;
}
/// E2E: deleting a repository must also cascade-delete its SAST findings
/// and SBOM entries.
#[tokio::test]
async fn delete_repo_cascades_sast_findings_and_sbom() {
let server = TestServer::start().await;
// Create a repo
let resp = server
.post(
"/api/v1/repositories",
&json!({
"name": "sast-cascade",
"git_url": "https://github.com/example/sast-cascade.git",
}),
)
.await;
let body: serde_json::Value = resp.json().await.unwrap();
let repo_id = body["data"]["id"].as_str().unwrap().to_string();
// Insert SAST finding and SBOM entry (no public API — direct DB write)
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
let db = client.database(&server.db_name());
let now = mongodb::bson::DateTime::now();
db.collection::<mongodb::bson::Document>("findings")
.insert_one(mongodb::bson::doc! {
"repo_id": &repo_id,
"fingerprint": "fp-test-1",
"scanner": "semgrep",
"scan_type": "sast",
"title": "SQL Injection",
"description": "desc",
"severity": "critical",
"status": "open",
"created_at": now,
"updated_at": now,
})
.await
.unwrap();
db.collection::<mongodb::bson::Document>("sbom_entries")
.insert_one(mongodb::bson::doc! {
"repo_id": &repo_id,
"name": "lodash",
"version": "4.17.20",
"package_manager": "npm",
"known_vulnerabilities": [],
})
.await
.unwrap();
assert_eq!(count_docs(&server, "findings").await, 1);
assert_eq!(count_docs(&server, "sbom_entries").await, 1);
// Delete repo (status is not asserted here; only the cascade effect is)
server
.delete(&format!("/api/v1/repositories/{repo_id}"))
.await;
// Both should be gone
assert_eq!(count_docs(&server, "findings").await, 0);
assert_eq!(count_docs(&server, "sbom_entries").await, 0);
server.cleanup().await;
}

View File

@@ -0,0 +1,48 @@
use crate::common::TestServer;
use serde_json::json;
/// E2E: a target created via POST shows up in the subsequent listing.
#[tokio::test]
async fn add_and_list_dast_targets() {
    let server = TestServer::start().await;

    // A fresh database has no targets.
    let resp = server.get("/api/v1/dast/targets").await;
    assert_eq!(resp.status(), 200);
    let listing: serde_json::Value = resp.json().await.unwrap();
    assert!(listing["data"].as_array().unwrap().is_empty());

    // Create one target through the API.
    let payload = json!({
        "name": "test-app",
        "base_url": "https://test-app.example.com",
        "target_type": "webapp",
    });
    let resp = server.post("/api/v1/dast/targets", &payload).await;
    assert_eq!(resp.status(), 200);

    // It must now appear in the listing with the fields we sent.
    let resp = server.get("/api/v1/dast/targets").await;
    let listing: serde_json::Value = resp.json().await.unwrap();
    let targets = listing["data"].as_array().unwrap();
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0]["name"], "test-app");
    assert_eq!(targets[0]["base_url"], "https://test-app.example.com");

    server.cleanup().await;
}
/// E2E: a fresh database reports an empty DAST findings list.
#[tokio::test]
async fn list_dast_findings_empty() {
    let server = TestServer::start().await;
    let resp = server.get("/api/v1/dast/findings").await;
    assert_eq!(resp.status(), 200);
    let payload: serde_json::Value = resp.json().await.unwrap();
    assert!(payload["data"].as_array().unwrap().is_empty());
    server.cleanup().await;
}

View File

@@ -0,0 +1,144 @@
use crate::common::TestServer;
use serde_json::json;
/// Helper: insert a finding directly via MongoDB for testing query endpoints.
async fn insert_finding(server: &TestServer, repo_id: &str, title: &str, severity: &str) {
// We insert via the agent's DB by posting to the internal test path.
// Since there's no direct "create finding" API, we use MongoDB directly.
let mongodb_uri = std::env::var("TEST_MONGODB_URI")
.unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
// Extract the database name from the server's unique DB
// We'll use the agent's internal DB through the stats endpoint to verify
let client = mongodb::Client::with_uri_str(&mongodb_uri).await.unwrap();
// Get the DB name from the test server by parsing the health response
// For now, we use a direct insert approach
let db = client.database(&server.db_name());
let now = mongodb::bson::DateTime::now();
db.collection::<mongodb::bson::Document>("findings")
.insert_one(mongodb::bson::doc! {
"repo_id": repo_id,
"fingerprint": format!("fp-{title}-{severity}"),
"scanner": "test-scanner",
"scan_type": "sast",
"title": title,
"description": format!("Test finding: {title}"),
"severity": severity,
"status": "open",
"created_at": now,
"updated_at": now,
})
.await
.unwrap();
}
/// E2E: a fresh database lists zero findings with total 0.
#[tokio::test]
async fn list_findings_empty() {
    let server = TestServer::start().await;
    let resp = server.get("/api/v1/findings").await;
    assert_eq!(resp.status(), 200);
    let payload: serde_json::Value = resp.json().await.unwrap();
    assert!(payload["data"].as_array().unwrap().is_empty());
    assert_eq!(payload["total"], 0);
    server.cleanup().await;
}
/// E2E: the findings listing returns all inserted findings and honors the
/// `severity` and `repo_id` query filters.
#[tokio::test]
async fn list_findings_with_data() {
let server = TestServer::start().await;
// Two findings in repo1, one in repo2, with distinct severities.
insert_finding(&server, "repo1", "SQL Injection", "critical").await;
insert_finding(&server, "repo1", "XSS", "high").await;
insert_finding(&server, "repo2", "Info Leak", "low").await;
let resp = server.get("/api/v1/findings").await;
assert_eq!(resp.status(), 200);
let body: serde_json::Value = resp.json().await.unwrap();
assert_eq!(body["total"], 3);
// Filter by severity
let resp = server.get("/api/v1/findings?severity=critical").await;
let body: serde_json::Value = resp.json().await.unwrap();
assert_eq!(body["total"], 1);
assert_eq!(body["data"][0]["title"], "SQL Injection");
// Filter by repo
let resp = server.get("/api/v1/findings?repo_id=repo1").await;
let body: serde_json::Value = resp.json().await.unwrap();
assert_eq!(body["total"], 2);
server.cleanup().await;
}
#[tokio::test]
async fn update_finding_status() {
    let server = TestServer::start().await;
    insert_finding(&server, "repo1", "Test Bug", "medium").await;
    // Discover the generated ObjectId via the list endpoint.
    let resp = server.get("/api/v1/findings").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    let finding_id = json["data"][0]["_id"]["$oid"].as_str().unwrap().to_string();
    // Transition the finding to resolved.
    let update = json!({ "status": "resolved" });
    let resp = server
        .patch(&format!("/api/v1/findings/{finding_id}/status"), &update)
        .await;
    assert_eq!(resp.status(), 200);
    // Re-fetch and confirm the new status stuck.
    let resp = server.get(&format!("/api/v1/findings/{finding_id}")).await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(json["data"]["status"], "resolved");
    server.cleanup().await;
}
#[tokio::test]
async fn bulk_update_finding_status() {
    let server = TestServer::start().await;
    insert_finding(&server, "repo1", "Bug A", "high").await;
    insert_finding(&server, "repo1", "Bug B", "high").await;
    // Collect both generated ids from the listing.
    let resp = server.get("/api/v1/findings").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    let ids = json["data"]
        .as_array()
        .unwrap()
        .iter()
        .map(|f| f["_id"]["$oid"].as_str().unwrap().to_string())
        .collect::<Vec<String>>();
    // One request flips both findings to false_positive.
    let payload = json!({
        "ids": ids,
        "status": "false_positive"
    });
    let resp = server.patch("/api/v1/findings/bulk-status", &payload).await;
    assert_eq!(resp.status(), 200);
    // Every id must now report the bulk-applied status.
    for id in &ids {
        let resp = server.get(&format!("/api/v1/findings/{id}")).await;
        let json: serde_json::Value = resp.json().await.unwrap();
        assert_eq!(json["data"]["status"], "false_positive");
    }
    server.cleanup().await;
}

View File

@@ -0,0 +1,29 @@
use crate::common::TestServer;
#[tokio::test]
async fn health_endpoint_returns_ok() {
    // The liveness probe should answer 200 with {"status":"ok"}.
    let server = TestServer::start().await;
    let resp = server.get("/api/v1/health").await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(json["status"], "ok");
    server.cleanup().await;
}
#[tokio::test]
async fn stats_overview_returns_zeroes_on_empty_db() {
    // With no repos or findings, every counter starts at zero.
    let server = TestServer::start().await;
    let resp = server.get("/api/v1/stats/overview").await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(json["data"]["repositories"], 0);
    assert_eq!(json["data"]["total_findings"], 0);
    server.cleanup().await;
}

View File

@@ -0,0 +1,6 @@
mod cascade_delete;
mod dast;
mod findings;
mod health;
mod repositories;
mod stats;

View File

@@ -0,0 +1,110 @@
use crate::common::TestServer;
use serde_json::json;
#[tokio::test]
async fn add_and_list_repository() {
    let server = TestServer::start().await;
    // Starts empty.
    let resp = server.get("/api/v1/repositories").await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    assert!(json["data"].as_array().unwrap().is_empty());
    // Register one repository.
    let payload = json!({
        "name": "test-repo",
        "git_url": "https://github.com/example/test-repo.git",
    });
    let resp = server.post("/api/v1/repositories", &payload).await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    let repo_id = json["data"]["id"].as_str().unwrap().to_string();
    assert!(!repo_id.is_empty());
    // The listing now contains exactly that repo.
    let resp = server.get("/api/v1/repositories").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    let repos = json["data"].as_array().unwrap();
    assert_eq!(repos.len(), 1);
    assert_eq!(repos[0]["name"], "test-repo");
    server.cleanup().await;
}
#[tokio::test]
async fn add_duplicate_repository_fails() {
    let server = TestServer::start().await;
    let payload = json!({
        "name": "dup-repo",
        "git_url": "https://github.com/example/dup-repo.git",
    });
    // The first insert goes through...
    assert_eq!(server.post("/api/v1/repositories", &payload).await.status(), 200);
    // ...the second is rejected by the unique index on git_url.
    assert_ne!(server.post("/api/v1/repositories", &payload).await.status(), 200);
    server.cleanup().await;
}
#[tokio::test]
async fn delete_repository() {
    let server = TestServer::start().await;
    // Create a repository to delete.
    let payload = json!({
        "name": "to-delete",
        "git_url": "https://github.com/example/to-delete.git",
    });
    let resp = server.post("/api/v1/repositories", &payload).await;
    let json: serde_json::Value = resp.json().await.unwrap();
    let repo_id = json["data"]["id"].as_str().unwrap();
    // Deleting by id succeeds.
    let resp = server
        .delete(&format!("/api/v1/repositories/{repo_id}"))
        .await;
    assert_eq!(resp.status(), 200);
    // The listing is empty again afterwards.
    let resp = server.get("/api/v1/repositories").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    assert!(json["data"].as_array().unwrap().is_empty());
    server.cleanup().await;
}
#[tokio::test]
async fn delete_nonexistent_repository_returns_404() {
    // A syntactically valid but unknown ObjectId maps to 404.
    let server = TestServer::start().await;
    let resp = server
        .delete("/api/v1/repositories/000000000000000000000000")
        .await;
    assert_eq!(resp.status(), 404);
    server.cleanup().await;
}
#[tokio::test]
async fn delete_invalid_id_returns_400() {
    // A malformed ObjectId is a client error, not a miss.
    let server = TestServer::start().await;
    let resp = server.delete("/api/v1/repositories/not-a-valid-id").await;
    assert_eq!(resp.status(), 400);
    server.cleanup().await;
}

View File

@@ -0,0 +1,111 @@
use crate::common::TestServer;
use serde_json::json;
#[tokio::test]
async fn stats_overview_reflects_inserted_data() {
    let server = TestServer::start().await;
    // Register one repository through the public API.
    server
        .post(
            "/api/v1/repositories",
            &json!({
                "name": "stats-repo",
                "git_url": "https://github.com/example/stats-repo.git",
            }),
        )
        .await;
    // Seed one finding per severity bucket directly in MongoDB.
    let uri = std::env::var("TEST_MONGODB_URI")
        .unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
    let client = mongodb::Client::with_uri_str(&uri).await.unwrap();
    let db = client.database(&server.db_name());
    let ts = mongodb::bson::DateTime::now();
    let fixtures = [
        ("Critical Bug", "critical"),
        ("High Bug", "high"),
        ("Medium Bug", "medium"),
        ("Low Bug", "low"),
    ];
    for (title, severity) in fixtures {
        let doc = mongodb::bson::doc! {
            "repo_id": "test-repo-id",
            "fingerprint": format!("fp-{title}"),
            "scanner": "test",
            "scan_type": "sast",
            "title": title,
            "description": "desc",
            "severity": severity,
            "status": "open",
            "created_at": ts,
            "updated_at": ts,
        };
        db.collection::<mongodb::bson::Document>("findings")
            .insert_one(doc)
            .await
            .unwrap();
    }
    // Overview must report the repo plus per-severity tallies.
    let resp = server.get("/api/v1/stats/overview").await;
    assert_eq!(resp.status(), 200);
    let json: serde_json::Value = resp.json().await.unwrap();
    let data = &json["data"];
    assert_eq!(data["repositories"], 1);
    assert_eq!(data["total_findings"], 4);
    assert_eq!(data["critical"], 1);
    assert_eq!(data["high"], 1);
    server.cleanup().await;
}
#[tokio::test]
async fn stats_update_after_finding_status_change() {
    let server = TestServer::start().await;
    // Seed a single open finding directly in MongoDB.
    let uri = std::env::var("TEST_MONGODB_URI")
        .unwrap_or_else(|_| "mongodb://root:example@localhost:27017/?authSource=admin".into());
    let client = mongodb::Client::with_uri_str(&uri).await.unwrap();
    let db = client.database(&server.db_name());
    let ts = mongodb::bson::DateTime::now();
    let doc = mongodb::bson::doc! {
        "repo_id": "repo-1",
        "fingerprint": "fp-stats-test",
        "scanner": "test",
        "scan_type": "sast",
        "title": "Stats Test Finding",
        "description": "desc",
        "severity": "high",
        "status": "open",
        "created_at": ts,
        "updated_at": ts,
    };
    let inserted = db
        .collection::<mongodb::bson::Document>("findings")
        .insert_one(doc)
        .await
        .unwrap();
    let finding_id = inserted.inserted_id.as_object_id().unwrap().to_hex();
    // Overview counts the new finding.
    let resp = server.get("/api/v1/stats/overview").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(json["data"]["total_findings"], 1);
    // Resolving changes status only — the document is not deleted.
    server
        .patch(
            &format!("/api/v1/findings/{finding_id}/status"),
            &json!({ "status": "resolved" }),
        )
        .await;
    // total_findings counts all findings regardless of status.
    let resp = server.get("/api/v1/stats/overview").await;
    let json: serde_json::Value = resp.json().await.unwrap();
    assert_eq!(json["data"]["total_findings"], 1);
    server.cleanup().await;
}

View File

@@ -1,4 +1,9 @@
// Integration tests for the compliance-agent crate.
// E2E / Integration tests for the compliance-agent API.
//
// Add tests that exercise the full pipeline, API handlers,
// and cross-module interactions here.
// These tests require a running MongoDB instance. Set TEST_MONGODB_URI
// if it's not at the default `mongodb://root:example@localhost:27017`.
//
// Run with: cargo test -p compliance-agent --test e2e
// Or nightly: (via CI with MongoDB service container)
mod api;

View File

@@ -7,6 +7,7 @@ pub mod finding;
pub mod graph;
pub mod issue;
pub mod mcp;
pub mod notification;
pub mod pentest;
pub mod repository;
pub mod sbom;
@@ -27,6 +28,7 @@ pub use graph::{
};
pub use issue::{IssueStatus, TrackerIssue, TrackerType};
pub use mcp::{McpServerConfig, McpServerStatus, McpTransport};
pub use notification::{CveNotification, NotificationSeverity, NotificationStatus};
pub use pentest::{
AttackChainNode, AttackNodeStatus, AuthMode, CodeContextHint, Environment, IdentityProvider,
PentestAuthConfig, PentestConfig, PentestEvent, PentestMessage, PentestSession, PentestStats,

View File

@@ -0,0 +1,103 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Status of a CVE notification.
///
/// Lifecycle: `New` → `Read` → `Dismissed`. Serialized lowercase
/// (`"new"`, `"read"`, `"dismissed"`) for both BSON storage and the API.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum NotificationStatus {
    /// Newly created, not yet seen by the user
    New,
    /// User has seen it (e.g., opened the notification panel)
    Read,
    /// User has explicitly acknowledged/dismissed it
    Dismissed,
}
/// Severity level for notification filtering.
///
/// NOTE: the derived `Ord`/`PartialOrd` follow declaration order
/// (Low < Medium < High < Critical) — do not reorder variants.
/// Serialized lowercase (`"low"` … `"critical"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "lowercase")]
pub enum NotificationSeverity {
    Low,
    Medium,
    High,
    Critical,
}
/// A notification about a newly discovered CVE affecting a tracked dependency.
///
/// Stored in the `cve_notifications` collection; `id` is `None` until the
/// document is inserted and MongoDB assigns an ObjectId.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CveNotification {
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<bson::oid::ObjectId>,
    /// The CVE/GHSA identifier
    pub cve_id: String,
    /// Repository where the vulnerable dependency is used
    pub repo_id: String,
    /// Repository name (denormalized for display)
    pub repo_name: String,
    /// Affected package name
    pub package_name: String,
    /// Affected version
    pub package_version: String,
    /// Human-readable severity
    pub severity: NotificationSeverity,
    /// CVSS score if available
    pub cvss_score: Option<f64>,
    /// Short summary of the vulnerability
    pub summary: Option<String>,
    /// Link to vulnerability details
    pub url: Option<String>,
    /// Notification lifecycle status
    pub status: NotificationStatus,
    /// When the CVE was first detected for this dependency
    #[serde(with = "super::serde_helpers::bson_datetime")]
    pub created_at: DateTime<Utc>,
    /// When the user last interacted with this notification
    // NOTE(review): unlike `created_at`, this field has no bson_datetime
    // serde helper, so chrono's default serialization is used — confirm
    // this round-trips as intended through MongoDB.
    pub read_at: Option<DateTime<Utc>>,
}
impl CveNotification {
pub fn new(
cve_id: String,
repo_id: String,
repo_name: String,
package_name: String,
package_version: String,
severity: NotificationSeverity,
) -> Self {
Self {
id: None,
cve_id,
repo_id,
repo_name,
package_name,
package_version,
severity,
cvss_score: None,
summary: None,
url: None,
status: NotificationStatus::New,
created_at: Utc::now(),
read_at: None,
}
}
}
/// Map an OSV/NVD severity string to our notification severity.
///
/// A numeric CVSS score, when present, takes precedence over the
/// free-form label (CVSS v3 bands: 9.0+ critical, 7.0+ high, 4.0+ medium).
pub fn parse_severity(s: Option<&str>, cvss: Option<f64>) -> NotificationSeverity {
    if let Some(score) = cvss {
        if score >= 9.0 {
            return NotificationSeverity::Critical;
        }
        if score >= 7.0 {
            return NotificationSeverity::High;
        }
        if score >= 4.0 {
            return NotificationSeverity::Medium;
        }
        return NotificationSeverity::Low;
    }
    // No score — fall back to the textual label, case-insensitively.
    let label = s.map(str::to_uppercase);
    match label.as_deref() {
        Some("CRITICAL") => NotificationSeverity::Critical,
        Some("HIGH") => NotificationSeverity::High,
        Some("MODERATE" | "MEDIUM") => NotificationSeverity::Medium,
        _ => NotificationSeverity::Low,
    }
}

View File

@@ -3847,3 +3847,33 @@ tbody tr:last-child td {
.help-chat-send:not(:disabled):hover {
background: var(--accent-hover);
}
/* ═══════════════════════════════════════════════════════════════
NOTIFICATION BELL — CVE alert dropdown
═══════════════════════════════════════════════════════════════ */
/* Fixed-position wrapper keeps the bell above page content (z-index 48). */
.notification-bell-wrapper { position: fixed; top: 16px; right: 28px; z-index: 48; }
.notification-bell-btn { position: relative; background: var(--bg-elevated); border: 1px solid var(--border); border-radius: 10px; padding: 8px 10px; color: var(--text-secondary); cursor: pointer; display: flex; align-items: center; transition: color 0.15s, border-color 0.15s; }
.notification-bell-btn:hover { color: var(--text-primary); border-color: var(--border-bright); }
/* Unread-count pill anchored to the bell's top-right corner. */
.notification-badge { position: absolute; top: -4px; right: -4px; background: var(--danger); color: #fff; font-size: 10px; font-weight: 700; min-width: 18px; height: 18px; border-radius: 9px; display: flex; align-items: center; justify-content: center; padding: 0 4px; font-family: 'Outfit', sans-serif; }
/* Dropdown panel: fixed width, scrollable body, capped height. */
.notification-panel { position: absolute; top: 44px; right: 0; width: 380px; max-height: 480px; background: var(--bg-secondary); border: 1px solid var(--border-bright); border-radius: 12px; overflow: hidden; box-shadow: 0 12px 48px rgba(0,0,0,0.5); display: flex; flex-direction: column; }
.notification-panel-header { display: flex; align-items: center; justify-content: space-between; padding: 12px 16px; border-bottom: 1px solid var(--border); font-family: 'Outfit', sans-serif; font-weight: 600; font-size: 14px; color: var(--text-primary); }
.notification-close-btn { background: none; border: none; color: var(--text-secondary); cursor: pointer; padding: 2px; }
.notification-panel-body { overflow-y: auto; flex: 1; padding: 8px; }
.notification-loading, .notification-empty { display: flex; flex-direction: column; align-items: center; justify-content: center; padding: 32px 16px; color: var(--text-secondary); font-size: 13px; gap: 8px; }
/* One card per CVE alert. */
.notification-item { padding: 10px 12px; border-radius: 8px; margin-bottom: 4px; background: var(--bg-card); border: 1px solid var(--border); transition: border-color 0.15s; }
.notification-item:hover { border-color: var(--border-bright); }
.notification-item-header { display: flex; align-items: center; gap: 8px; margin-bottom: 4px; }
/* Severity chip — one color variant per NotificationSeverity value. */
.notification-sev { font-size: 10px; font-weight: 700; padding: 2px 6px; border-radius: 4px; text-transform: uppercase; letter-spacing: 0.5px; font-family: 'Outfit', sans-serif; }
.notification-sev.sev-critical { background: var(--danger-bg); color: var(--danger); }
.notification-sev.sev-high { background: rgba(255,140,0,0.12); color: #ff8c00; }
.notification-sev.sev-medium { background: var(--warning-bg); color: var(--warning); }
.notification-sev.sev-low { background: rgba(0,200,255,0.08); color: var(--accent); }
.notification-cve-id { font-size: 12px; font-weight: 600; color: var(--text-primary); font-family: 'JetBrains Mono', monospace; }
.notification-cve-id a { color: var(--accent); text-decoration: none; }
.notification-cve-id a:hover { text-decoration: underline; }
.notification-cvss { font-size: 10px; color: var(--text-secondary); margin-left: auto; font-family: 'JetBrains Mono', monospace; }
.notification-dismiss-btn { background: none; border: none; color: var(--text-tertiary); cursor: pointer; padding: 2px; margin-left: 4px; }
.notification-dismiss-btn:hover { color: var(--danger); }
.notification-item-pkg { font-size: 12px; color: var(--text-primary); font-family: 'JetBrains Mono', monospace; }
.notification-item-repo { font-size: 11px; color: var(--text-secondary); margin-bottom: 4px; }
/* Summary clamped to two lines via the -webkit-box trick. */
.notification-item-summary { font-size: 11px; color: var(--text-secondary); line-height: 1.4; display: -webkit-box; -webkit-line-clamp: 2; -webkit-box-orient: vertical; overflow: hidden; }

View File

@@ -2,6 +2,7 @@ use dioxus::prelude::*;
use crate::app::Route;
use crate::components::help_chat::HelpChat;
use crate::components::notification_bell::NotificationBell;
use crate::components::sidebar::Sidebar;
use crate::components::toast::{ToastContainer, Toasts};
use crate::infrastructure::auth_check::check_auth;
@@ -21,6 +22,7 @@ pub fn AppShell() -> Element {
main { class: "main-content",
Outlet::<Route> {}
}
NotificationBell {}
ToastContainer {}
HelpChat {}
}

View File

@@ -4,6 +4,7 @@ pub mod code_inspector;
pub mod code_snippet;
pub mod file_tree;
pub mod help_chat;
pub mod notification_bell;
pub mod page_header;
pub mod pagination;
pub mod pentest_wizard;

View File

@@ -0,0 +1,155 @@
use dioxus::prelude::*;
use dioxus_free_icons::icons::bs_icons::*;
use dioxus_free_icons::Icon;
use crate::infrastructure::notifications::{
dismiss_notification, fetch_notification_count, fetch_notifications,
mark_all_notifications_read,
};
/// Floating bell button (top-right) that surfaces CVE alert notifications.
///
/// Polls the unread count every 30 seconds, loads the alert list when the
/// panel is opened, marks everything read on open, and lets the user
/// dismiss individual alerts.
#[component]
pub fn NotificationBell() -> Element {
    // Reactive UI state: panel visibility, unread badge count,
    // loaded notification list, and an in-flight-fetch flag.
    let mut is_open = use_signal(|| false);
    let mut count = use_signal(|| 0u64);
    let mut notifications = use_signal(Vec::new);
    let mut is_loading = use_signal(|| false);
    // Poll notification count every 30 seconds
    use_resource(move || async move {
        loop {
            // Errors are ignored: the badge simply keeps its last value.
            if let Ok(c) = fetch_notification_count().await {
                count.set(c);
            }
            // Target-specific sleep: gloo timer on wasm, tokio elsewhere.
            #[cfg(feature = "web")]
            {
                gloo_timers::future::TimeoutFuture::new(30_000).await;
            }
            #[cfg(not(feature = "web"))]
            {
                tokio::time::sleep(std::time::Duration::from_secs(30)).await;
            }
        }
    });
    // Load notifications when panel opens
    let load_notifications = move |_| {
        // Toggle first; when the click closed the panel, skip the fetch.
        is_open.set(!is_open());
        if !is_open() {
            return;
        }
        is_loading.set(true);
        spawn(async move {
            if let Ok(resp) = fetch_notifications().await {
                notifications.set(resp.data);
            }
            // Mark all as read when panel opens
            // NOTE(review): count is zeroed and read-all is fired even if the
            // fetch above failed — best-effort badge behavior; confirm intended.
            let _ = mark_all_notifications_read().await;
            count.set(0);
            is_loading.set(false);
        });
    };
    // Dismiss one notification on the server, then drop it from the
    // local list by comparing the Mongo extended-JSON "$oid" field.
    let on_dismiss = move |id: String| {
        spawn(async move {
            let _ = dismiss_notification(id.clone()).await;
            notifications.write().retain(|n| {
                n.id.as_ref()
                    .and_then(|v| v.get("$oid"))
                    .and_then(|v| v.as_str())
                    != Some(&id)
            });
        });
    };
    rsx! {
        div { class: "notification-bell-wrapper",
            // Bell button
            button {
                class: "notification-bell-btn",
                onclick: load_notifications,
                title: "CVE Alerts",
                Icon { icon: BsBell, width: 18, height: 18 }
                // Badge renders only while there are unread alerts.
                if count() > 0 {
                    span { class: "notification-badge", "{count()}" }
                }
            }
            // Dropdown panel
            if is_open() {
                div { class: "notification-panel",
                    div { class: "notification-panel-header",
                        span { "CVE Alerts" }
                        button {
                            class: "notification-close-btn",
                            onclick: move |_| is_open.set(false),
                            Icon { icon: BsX, width: 16, height: 16 }
                        }
                    }
                    div { class: "notification-panel-body",
                        if is_loading() {
                            div { class: "notification-loading", "Loading..." }
                        } else if notifications().is_empty() {
                            div { class: "notification-empty",
                                Icon { icon: BsShieldCheck, width: 32, height: 32 }
                                p { "No CVE alerts" }
                            }
                        } else {
                            for notif in notifications().iter() {
                                {
                                    // Extract the ObjectId hex from the raw "_id" JSON.
                                    let id = notif.id.as_ref()
                                        .and_then(|v| v.get("$oid"))
                                        .and_then(|v| v.as_str())
                                        .unwrap_or("")
                                        .to_string();
                                    // CSS modifier class per severity value.
                                    let sev_class = match notif.severity.as_str() {
                                        "critical" => "sev-critical",
                                        "high" => "sev-high",
                                        "medium" => "sev-medium",
                                        _ => "sev-low",
                                    };
                                    let dismiss_id = id.clone();
                                    rsx! {
                                        div { class: "notification-item",
                                            div { class: "notification-item-header",
                                                span { class: "notification-sev {sev_class}",
                                                    "{notif.severity.to_uppercase()}"
                                                }
                                                // CVE id, linked when a details URL exists.
                                                span { class: "notification-cve-id",
                                                    if let Some(ref url) = notif.url {
                                                        a { href: "{url}", target: "_blank", "{notif.cve_id}" }
                                                    } else {
                                                        "{notif.cve_id}"
                                                    }
                                                }
                                                if let Some(score) = notif.cvss_score {
                                                    span { class: "notification-cvss", "CVSS {score:.1}" }
                                                }
                                                button {
                                                    class: "notification-dismiss-btn",
                                                    title: "Dismiss",
                                                    onclick: move |_| on_dismiss(dismiss_id.clone()),
                                                    Icon { icon: BsXCircle, width: 14, height: 14 }
                                                }
                                            }
                                            div { class: "notification-item-pkg",
                                                "{notif.package_name} {notif.package_version}"
                                            }
                                            div { class: "notification-item-repo",
                                                "{notif.repo_name}"
                                            }
                                            if let Some(ref summary) = notif.summary {
                                                div { class: "notification-item-summary",
                                                    "{summary}"
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

View File

@@ -8,6 +8,7 @@ pub mod graph;
pub mod help_chat;
pub mod issues;
pub mod mcp;
pub mod notifications;
pub mod pentest;
#[allow(clippy::too_many_arguments)]
pub mod repositories;

View File

@@ -0,0 +1,91 @@
use dioxus::prelude::*;
use serde::{Deserialize, Serialize};
/// One page of CVE notifications from `GET /api/v1/notifications`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NotificationListResponse {
    /// The notifications in this page.
    pub data: Vec<CveNotificationData>,
    /// Total match count; `#[serde(default)]` tolerates its absence.
    #[serde(default)]
    pub total: Option<u64>,
}

/// Client-side view of a CVE notification document.
///
/// `id` and `created_at` are kept as raw `serde_json::Value` — the bell UI
/// reads the ObjectId via the `"$oid"` key, which suggests the agent emits
/// Mongo extended JSON here (NOTE(review): confirm against the agent API).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct CveNotificationData {
    #[serde(rename = "_id")]
    pub id: Option<serde_json::Value>,
    pub cve_id: String,
    pub repo_name: String,
    pub package_name: String,
    pub package_version: String,
    /// Lowercase severity string ("low" … "critical").
    pub severity: String,
    pub cvss_score: Option<f64>,
    pub summary: Option<String>,
    pub url: Option<String>,
    pub status: String,
    #[serde(default)]
    pub created_at: Option<serde_json::Value>,
}

/// Body of `GET /api/v1/notifications/count`.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct NotificationCountResponse {
    /// Number of unread notifications (drives the badge).
    pub count: u64,
}
/// Server fn: fetch the unread CVE notification count from the agent API.
///
/// Returns the count for the dashboard badge, or a `ServerFnError` if the
/// request fails, the agent answers with a non-2xx status, or the body
/// cannot be decoded.
#[server]
pub async fn fetch_notification_count() -> Result<u64, ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;
    let url = format!("{}/api/v1/notifications/count", state.agent_api_url);
    let resp = reqwest::get(&url)
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?
        // Surface non-2xx responses here instead of failing later with a
        // confusing JSON-decode error.
        .error_for_status()
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    let body: NotificationCountResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(body.count)
}
/// Server fn: fetch the most recent CVE notifications (up to 20) from the
/// agent API for display in the notification panel.
///
/// # Errors
/// Fails if the request errors, the agent returns a non-2xx status, or the
/// response body cannot be decoded.
#[server]
pub async fn fetch_notifications() -> Result<NotificationListResponse, ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;
    let url = format!("{}/api/v1/notifications?limit=20", state.agent_api_url);
    let resp = reqwest::get(&url)
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?
        // Turn HTTP-level failures into an explicit error rather than a
        // downstream JSON-decode failure.
        .error_for_status()
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    let body: NotificationListResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(body)
}
/// Server fn: mark every unread CVE notification as read.
///
/// Called when the notification panel opens so the badge resets.
///
/// # Errors
/// Fails if the request errors or the agent returns a non-2xx status
/// (previously a 4xx/5xx response was silently ignored).
#[server]
pub async fn mark_all_notifications_read() -> Result<(), ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;
    let url = format!("{}/api/v1/notifications/read-all", state.agent_api_url);
    reqwest::Client::new()
        .post(&url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?
        .error_for_status()
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(())
}
/// Server fn: dismiss one CVE notification by its ObjectId hex string.
///
/// # Errors
/// Fails if the request errors or the agent returns a non-2xx status
/// (previously an error status — e.g. unknown id — was silently ignored).
#[server]
pub async fn dismiss_notification(id: String) -> Result<(), ServerFnError> {
    let state: super::server_state::ServerState =
        dioxus_fullstack::FullstackContext::extract().await?;
    let url = format!("{}/api/v1/notifications/{id}/dismiss", state.agent_api_url);
    reqwest::Client::new()
        .patch(&url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(e.to_string()))?
        .error_for_status()
        .map_err(|e| ServerFnError::new(e.to_string()))?;
    Ok(())
}

View File

@@ -15,6 +15,30 @@ use crate::parsers::registry::ParserRegistry;
use super::community::detect_communities;
use super::impact::ImpactAnalyzer;
/// Walk up the qualified-name hierarchy to find the closest ancestor
/// that exists in the node map.
///
/// For `"src/main.rs::config::load"` this tries:
/// 1. `"src/main.rs::config"` (trim last `::` segment)
/// 2. `"src/main.rs"` (trim again)
///
/// Returns the first match found, or `None` if the node is a root.
///
/// Generic over the map's value type: only key membership is consulted,
/// which also lets unit tests call it without building graph indices.
fn find_parent_qname<V>(qname: &str, node_map: &HashMap<String, V>) -> Option<String> {
    let mut current = qname.to_string();
    // Repeatedly trim the trailing "::segment"; stop at the first ancestor
    // that is present in the map.
    while let Some(pos) = current.rfind("::") {
        current.truncate(pos);
        if node_map.contains_key(&current) {
            return Some(current);
        }
    }
    // No "::" left — this is a top-level node (file), no parent.
    None
}
/// The main graph engine that builds and manages code knowledge graphs
pub struct GraphEngine {
parser_registry: ParserRegistry,
@@ -89,7 +113,12 @@ impl GraphEngine {
Ok((code_graph, build_run))
}
/// Build petgraph from parsed output, resolving edges to node indices
/// Build petgraph from parsed output, resolving edges to node indices.
///
/// After resolving the explicit edges from parsers, we synthesise
/// `Contains` edges so that every node is reachable from its parent
/// file or module. This eliminates disconnected "islands" that
/// otherwise appear when files share no direct call/import edges.
fn build_petgraph(&self, parse_output: ParseOutput) -> Result<CodeGraph, CoreError> {
let mut graph = DiGraph::new();
let mut node_map: HashMap<String, NodeIndex> = HashMap::new();
@@ -102,15 +131,13 @@ impl GraphEngine {
node_map.insert(node.qualified_name.clone(), idx);
}
// Resolve and add edges — rewrite target to the resolved qualified name
// so the persisted edge references match node qualified_names.
// Resolve and add explicit edges from parsers
let mut resolved_edges = Vec::new();
for mut edge in parse_output.edges {
let source_idx = node_map.get(&edge.source);
let resolved = self.resolve_edge_target(&edge.target, &node_map);
if let (Some(&src), Some(tgt)) = (source_idx, resolved) {
// Update target to the resolved qualified name
let resolved_name = node_map
.iter()
.find(|(_, &idx)| idx == tgt)
@@ -121,7 +148,48 @@ impl GraphEngine {
graph.add_edge(src, tgt, edge.kind.clone());
resolved_edges.push(edge);
}
// Skip unresolved edges (cross-file, external deps) — conservative approach
}
// Synthesise Contains edges: connect each node to its closest
// parent in the qualified-name hierarchy.
//
// For "src/main.rs::config::load", the parent chain is:
// "src/main.rs::config" → "src/main.rs"
//
// We walk up the qualified name (splitting on "::") and link to
// the first ancestor that exists in the node map.
let repo_id = nodes.first().map(|n| n.repo_id.as_str()).unwrap_or("");
let build_id = nodes
.first()
.map(|n| n.graph_build_id.as_str())
.unwrap_or("");
let qualified_names: Vec<String> = nodes.iter().map(|n| n.qualified_name.clone()).collect();
let file_paths: HashMap<String, String> = nodes
.iter()
.map(|n| (n.qualified_name.clone(), n.file_path.clone()))
.collect();
for qname in &qualified_names {
if let Some(parent_qname) = find_parent_qname(qname, &node_map) {
let child_idx = node_map[qname];
let parent_idx = node_map[&parent_qname];
// Avoid duplicate edges
if !graph.contains_edge(parent_idx, child_idx) {
graph.add_edge(parent_idx, child_idx, CodeEdgeKind::Contains);
resolved_edges.push(CodeEdge {
id: None,
repo_id: repo_id.to_string(),
graph_build_id: build_id.to_string(),
source: parent_qname,
target: qname.clone(),
kind: CodeEdgeKind::Contains,
file_path: file_paths.get(qname).cloned().unwrap_or_default(),
line_number: None,
});
}
}
}
Ok(CodeGraph {
@@ -132,33 +200,62 @@ impl GraphEngine {
})
}
/// Try to resolve an edge target to a known node
/// Try to resolve an edge target to a known node.
///
/// Resolution strategies (in order):
/// 1. Direct qualified-name match
/// 2. Suffix match: "foo" matches "src/main.rs::mod::foo"
/// 3. Module-path match: "config::load" matches "src/config.rs::load"
/// 4. Self-method: "self.method" matches "::method"
fn resolve_edge_target(
&self,
target: &str,
node_map: &HashMap<String, NodeIndex>,
) -> Option<NodeIndex> {
// Direct match
// 1. Direct match
if let Some(idx) = node_map.get(target) {
return Some(*idx);
}
// Try matching just the function/type name (intra-file resolution)
// 2. Suffix match: "foo" → "path/file.rs::foo"
let suffix_pattern = format!("::{target}");
let dot_pattern = format!(".{target}");
for (qualified, idx) in node_map {
// Match "foo" to "path/file.rs::foo" or "path/file.rs::Type::foo"
if qualified.ends_with(&format!("::{target}"))
|| qualified.ends_with(&format!(".{target}"))
{
if qualified.ends_with(&suffix_pattern) || qualified.ends_with(&dot_pattern) {
return Some(*idx);
}
}
// Try matching method calls like "self.method" -> look for "::method"
// 3. Module-path match: "config::load" → try matching the last N
// segments of the target against node qualified names.
// This handles cross-file calls like `crate::config::load` or
// `super::handlers::process` where the prefix differs.
if target.contains("::") {
// Strip common Rust path prefixes
let stripped = target
.strip_prefix("crate::")
.or_else(|| target.strip_prefix("super::"))
.or_else(|| target.strip_prefix("self::"))
.unwrap_or(target);
let segments: Vec<&str> = stripped.split("::").collect();
// Try matching progressively shorter suffixes
for start in 0..segments.len() {
let suffix = segments[start..].join("::");
let pattern = format!("::{suffix}");
for (qualified, idx) in node_map {
if qualified.ends_with(&pattern) {
return Some(*idx);
}
}
}
}
// 4. Self-method: "self.method" → "::method"
if let Some(method_name) = target.strip_prefix("self.") {
let pattern = format!("::{method_name}");
for (qualified, idx) in node_map {
if qualified.ends_with(&format!("::{method_name}"))
|| qualified.ends_with(&format!(".{method_name}"))
{
if qualified.ends_with(&pattern) {
return Some(*idx);
}
}
@@ -353,4 +450,83 @@ mod tests {
assert!(code_graph.node_map.contains_key("a::c"));
assert!(code_graph.node_map.contains_key("a::d"));
}
#[test]
fn test_contains_edges_synthesised() {
    let engine = GraphEngine::new(1000);
    let mut output = ParseOutput::default();
    // Three-level hierarchy: file → module → function.
    for qname in [
        "src/main.rs",
        "src/main.rs::config",
        "src/main.rs::config::load",
    ] {
        output.nodes.push(make_node(qname));
    }
    let code_graph = engine.build_petgraph(output).unwrap();
    // One synthesised Contains edge per parent/child pair:
    // src/main.rs → ::config and ::config → ::config::load.
    let contains: Vec<_> = code_graph
        .edges
        .iter()
        .filter(|e| matches!(e.kind, CodeEdgeKind::Contains))
        .collect();
    assert_eq!(contains.len(), 2, "expected 2 Contains edges");
    let sources: Vec<&str> = contains.iter().map(|e| e.source.as_str()).collect();
    assert!(sources.contains(&"src/main.rs"));
    assert!(sources.contains(&"src/main.rs::config"));
}
#[test]
fn test_contains_edges_no_duplicates_with_existing_edges() {
    let engine = GraphEngine::new(1000);
    let mut output = ParseOutput::default();
    output.nodes.push(make_node("src/main.rs"));
    output.nodes.push(make_node("src/main.rs::foo"));
    // Pre-existing self-referential Calls edge must not be duplicated
    // by the Contains synthesis pass.
    let call_edge = CodeEdge {
        id: None,
        repo_id: "test".to_string(),
        graph_build_id: "build1".to_string(),
        source: "src/main.rs::foo".to_string(),
        target: "src/main.rs::foo".to_string(),
        kind: CodeEdgeKind::Calls,
        file_path: "src/main.rs".to_string(),
        line_number: Some(1),
    };
    output.edges.push(call_edge);
    let code_graph = engine.build_petgraph(output).unwrap();
    // Exactly the explicit Calls edge plus one synthesised Contains edge.
    assert_eq!(code_graph.edges.len(), 2);
}
#[test]
fn test_cross_file_resolution_with_module_path() {
    let engine = GraphEngine::new(1000);
    let node_map = build_test_node_map(&["src/config.rs::load_config", "src/main.rs::main"]);
    // A crate-prefixed module path must resolve to the node in another file.
    let resolved = engine.resolve_edge_target("crate::config::load_config", &node_map);
    assert!(resolved.is_some(), "cross-file crate:: path should resolve");
}
#[test]
fn test_find_parent_qname() {
    let node_map = build_test_node_map(&[
        "src/main.rs",
        "src/main.rs::config",
        "src/main.rs::config::load",
    ]);
    // Each level resolves to its immediate parent; the file is a root.
    let cases = [
        ("src/main.rs::config::load", Some("src/main.rs::config")),
        ("src/main.rs::config", Some("src/main.rs")),
        ("src/main.rs", None),
    ];
    for (qname, expected) in cases {
        assert_eq!(
            find_parent_qname(qname, &node_map),
            expected.map(String::from)
        );
    }
}
}