Add DAST, graph modules, toast notifications, and dashboard enhancements

Add DAST scanning and code knowledge graph features across the stack:
- compliance-dast and compliance-graph workspace crates
- Agent API handlers and routes for DAST targets/scans and graph builds
- Core models and traits for DAST and graph domains
- Dashboard pages for DAST targets/findings/overview and graph explorer/impact
- Toast notification system with auto-dismiss for async action feedback
- Button click animations and disabled states for better UX

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Author: Sharang Parnerkar
Date: 2026-03-04 13:53:50 +01:00
Parent: 03ee69834d
Commit: cea8f59e10
69 changed files with 8745 additions and 54 deletions


@@ -15,6 +15,16 @@ use crate::pipeline::patterns::{GdprPatternScanner, OAuthPatternScanner};
use crate::pipeline::sbom::SbomScanner;
use crate::pipeline::semgrep::SemgrepScanner;

/// Context from graph analysis passed to LLM triage for enhanced filtering
#[derive(Debug)]
#[allow(dead_code)]
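// Some fields are presumably only read when the triage prompt is built, hence the allow.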
pub struct GraphContext {
pub node_count: u32,
pub edge_count: u32,
pub community_count: u32,
pub impacts: Vec<compliance_core::models::graph::ImpactAnalysis>,
}

pub struct PipelineOrchestrator {
config: AgentConfig,
db: Database,
@@ -172,13 +182,30 @@ impl PipelineOrchestrator {
Err(e) => tracing::warn!("[{repo_id}] OAuth pattern scan failed: {e}"),
}

// Stage 4.5: Graph Building
tracing::info!("[{repo_id}] Stage 4.5: Graph Building");
self.update_phase(scan_run_id, "graph_building").await;
let graph_context = match self.build_code_graph(&repo_path, &repo_id, &all_findings).await
{
Ok(ctx) => Some(ctx),
Err(e) => {
tracing::warn!("[{repo_id}] Graph building failed: {e}");
None
}
};
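
// A failed graph build is non-fatal: triage simply proceeds without graph context.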

// Stage 5: LLM Triage (enhanced with graph context)
tracing::info!(
"[{repo_id}] Stage 5: LLM Triage ({} findings)",
all_findings.len()
);
self.update_phase(scan_run_id, "llm_triage").await;
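// `triage_findings` now takes the graph context as a third argument;
// `graph_context.as_ref()` passes it as an `Option<&GraphContext>`.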
let triaged = crate::llm::triage::triage_findings(
&self.llm,
&mut all_findings,
graph_context.as_ref(),
)
.await;
tracing::info!("[{repo_id}] Triaged: {triaged} findings passed confidence threshold");

// Dedup against existing findings and insert new ones
@@ -250,10 +277,121 @@ impl PipelineOrchestrator {
)
.await?;

// Stage 8: DAST (async, optional; only runs if a DastTarget is configured)
tracing::info!("[{repo_id}] Stage 8: Checking for DAST targets");
self.update_phase(scan_run_id, "dast_scanning").await;
self.maybe_trigger_dast(&repo_id, scan_run_id).await;
tracing::info!("[{repo_id}] Scan complete: {new_count} new findings");
Ok(new_count)
}

/// Build the code knowledge graph for a repo and compute impact analyses
async fn build_code_graph(
&self,
repo_path: &std::path::Path,
repo_id: &str,
findings: &[Finding],
) -> Result<GraphContext, AgentError> {
let graph_build_id = uuid::Uuid::new_v4().to_string();
let engine = compliance_graph::GraphEngine::new(50_000);
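// (The 50_000 here is assumed to be a node budget; GraphEngine's constructor
// is not part of this diff.)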
let (mut code_graph, build_run) = engine
.build_graph(repo_path, repo_id, &graph_build_id)
.map_err(|e| AgentError::Other(format!("Graph build error: {e}")))?;

// Apply community detection
compliance_graph::graph::community::apply_communities(&mut code_graph);

// Store graph in MongoDB
let store = compliance_graph::graph::persistence::GraphStore::new(self.db.inner());
store
.delete_repo_graph(repo_id)
.await
.map_err(|e| AgentError::Other(format!("Graph cleanup error: {e}")))?;
store
.store_graph(&build_run, &code_graph.nodes, &code_graph.edges)
.await
.map_err(|e| AgentError::Other(format!("Graph store error: {e}")))?;

// Compute impact analysis for each finding
let analyzer = compliance_graph::GraphEngine::impact_analyzer(&code_graph);
let mut impacts = Vec::new();
for finding in findings {
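// Only findings anchored to a file can be located in the graph; others are skipped.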
if let Some(file_path) = &finding.file_path {
let impact = analyzer.analyze(
repo_id,
&finding.fingerprint,
&graph_build_id,
file_path,
finding.line_number,
);
store
.store_impact(&impact)
.await
.map_err(|e| AgentError::Other(format!("Impact store error: {e}")))?;
impacts.push(impact);
}
}
Ok(GraphContext {
node_count: build_run.node_count,
edge_count: build_run.edge_count,
community_count: build_run.community_count,
impacts,
})
}

/// Trigger DAST scan if a target is configured for this repo
async fn maybe_trigger_dast(&self, repo_id: &str, scan_run_id: &str) {
use futures_util::TryStreamExt;
let filter = mongodb::bson::doc! { "repo_id": repo_id };
let targets: Vec<compliance_core::models::DastTarget> = match self
.db
.dast_targets()
.find(filter)
.await
{
Ok(cursor) => cursor.try_collect().await.unwrap_or_default(),
Err(_) => return,
};
if targets.is_empty() {
tracing::info!("[{repo_id}] No DAST targets configured, skipping");
return;
}
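
// Each target gets its own detached task so DAST runs in the background
// and does not block the SAST scan from completing.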
for target in targets {
let db = self.db.clone();
let scan_run_id = scan_run_id.to_string();
tokio::spawn(async move {
let orchestrator = compliance_dast::DastOrchestrator::new(100);
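// (The 100 passed to DastOrchestrator::new is assumed to be a request or
// concurrency budget; the compliance-dast API is not shown in this diff.)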
match orchestrator.run_scan(&target, Vec::new()).await {
Ok((mut scan_run, findings)) => {
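// Link the DAST run back to the SAST scan that triggered it.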
scan_run.sast_scan_run_id = Some(scan_run_id);
if let Err(e) = db.dast_scan_runs().insert_one(&scan_run).await {
tracing::error!("Failed to store DAST scan run: {e}");
}
for finding in &findings {
if let Err(e) = db.dast_findings().insert_one(finding).await {
tracing::error!("Failed to store DAST finding: {e}");
}
}
tracing::info!(
"DAST scan complete: {} findings",
findings.len()
);
}
Err(e) => {
tracing::error!("DAST scan failed: {e}");
}
}
});
}
}

async fn update_phase(&self, scan_run_id: &str, phase: &str) {
if let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(scan_run_id) {
let _ = self