Add a complete AI pentest system where Claude autonomously drives security testing via tool-calling. The LLM selects from 16 tools, chains results, and builds an attack chain DAG. Core: - PentestTool trait (dyn-compatible) with PentestToolContext/Result - PentestSession, AttackChainNode, PentestMessage, PentestEvent models - 10 new DastVulnType variants (DNS, DMARC, TLS, cookies, CSP, CORS, etc.) - LLM client chat_with_tools() for OpenAI-compatible tool calling Tools (16 total): - 5 agent wrappers: SQL injection, XSS, auth bypass, SSRF, API fuzzer - 11 new infra tools: DNS checker, DMARC checker, TLS analyzer, security headers, cookie analyzer, CSP analyzer, rate limit tester, console log detector, CORS checker, OpenAPI parser, recon - ToolRegistry for tool lookup and LLM definition generation Orchestrator: - PentestOrchestrator with iterative tool-calling loop (max 50 rounds) - Attack chain node recording per tool invocation - SSE event broadcasting for real-time progress - Strategy-aware system prompts (quick/comprehensive/targeted/aggressive/stealth) API (9 endpoints): - POST/GET /pentest/sessions, GET /pentest/sessions/:id - POST /pentest/sessions/:id/chat, GET /pentest/sessions/:id/stream - GET /pentest/sessions/:id/attack-chain, messages, findings - GET /pentest/stats Dashboard: - Pentest dashboard with stat cards, severity distribution, session list - Chat-based session page with split layout (chat + findings/attack chain) - Inline tool execution indicators, auto-polling, new session modal - Sidebar navigation item Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
293 lines
9.0 KiB
Rust
use mongodb::bson::doc;
|
|
use mongodb::options::IndexOptions;
|
|
use mongodb::{Client, Collection, IndexModel};
|
|
|
|
use compliance_core::models::*;
|
|
|
|
use crate::error::AgentError;
|
|
|
|
#[derive(Clone, Debug)]
|
|
pub struct Database {
|
|
inner: mongodb::Database,
|
|
}
|
|
|
|
impl Database {
|
|
pub async fn connect(uri: &str, db_name: &str) -> Result<Self, AgentError> {
|
|
let client = Client::with_uri_str(uri).await?;
|
|
let db = client.database(db_name);
|
|
db.run_command(doc! { "ping": 1 }).await?;
|
|
tracing::info!("Connected to MongoDB database '{db_name}'");
|
|
Ok(Self { inner: db })
|
|
}
|
|
|
|
pub async fn ensure_indexes(&self) -> Result<(), AgentError> {
|
|
// repositories: unique git_url
|
|
self.repositories()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "git_url": 1 })
|
|
.options(IndexOptions::builder().unique(true).build())
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// findings: unique fingerprint
|
|
self.findings()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "fingerprint": 1 })
|
|
.options(IndexOptions::builder().unique(true).build())
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// findings: repo_id + severity compound
|
|
self.findings()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "severity": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// scan_runs: repo_id + started_at descending
|
|
self.scan_runs()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "started_at": -1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// sbom_entries: compound
|
|
self.sbom_entries()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "name": 1, "version": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// cve_alerts: unique cve_id + repo_id
|
|
self.cve_alerts()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "cve_id": 1, "repo_id": 1 })
|
|
.options(IndexOptions::builder().unique(true).build())
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// tracker_issues: unique finding_id
|
|
self.tracker_issues()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "finding_id": 1 })
|
|
.options(IndexOptions::builder().unique(true).build())
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// graph_nodes: compound (repo_id, graph_build_id)
|
|
self.graph_nodes()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "graph_build_id": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// graph_edges: compound (repo_id, graph_build_id)
|
|
self.graph_edges()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "graph_build_id": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// graph_builds: compound (repo_id, started_at DESC)
|
|
self.graph_builds()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "started_at": -1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// impact_analyses: unique (repo_id, finding_id)
|
|
self.impact_analyses()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "finding_id": 1 })
|
|
.options(IndexOptions::builder().unique(true).build())
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// dast_targets: index on repo_id
|
|
self.dast_targets()
|
|
.create_index(IndexModel::builder().keys(doc! { "repo_id": 1 }).build())
|
|
.await?;
|
|
|
|
// dast_scan_runs: compound (target_id, started_at DESC)
|
|
self.dast_scan_runs()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "target_id": 1, "started_at": -1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// dast_findings: compound (scan_run_id, vuln_type)
|
|
self.dast_findings()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "scan_run_id": 1, "vuln_type": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// code_embeddings: compound (repo_id, graph_build_id)
|
|
self.code_embeddings()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "graph_build_id": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// embedding_builds: compound (repo_id, started_at DESC)
|
|
self.embedding_builds()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "repo_id": 1, "started_at": -1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// pentest_sessions: compound (target_id, started_at DESC)
|
|
self.pentest_sessions()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "target_id": 1, "started_at": -1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// pentest_sessions: status index
|
|
self.pentest_sessions()
|
|
.create_index(IndexModel::builder().keys(doc! { "status": 1 }).build())
|
|
.await?;
|
|
|
|
// attack_chain_nodes: compound (session_id, node_id)
|
|
self.attack_chain_nodes()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "session_id": 1, "node_id": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
// pentest_messages: compound (session_id, created_at)
|
|
self.pentest_messages()
|
|
.create_index(
|
|
IndexModel::builder()
|
|
.keys(doc! { "session_id": 1, "created_at": 1 })
|
|
.build(),
|
|
)
|
|
.await?;
|
|
|
|
tracing::info!("Database indexes ensured");
|
|
Ok(())
|
|
}
|
|
|
|
pub fn repositories(&self) -> Collection<TrackedRepository> {
|
|
self.inner.collection("repositories")
|
|
}
|
|
|
|
pub fn findings(&self) -> Collection<Finding> {
|
|
self.inner.collection("findings")
|
|
}
|
|
|
|
pub fn scan_runs(&self) -> Collection<ScanRun> {
|
|
self.inner.collection("scan_runs")
|
|
}
|
|
|
|
pub fn sbom_entries(&self) -> Collection<SbomEntry> {
|
|
self.inner.collection("sbom_entries")
|
|
}
|
|
|
|
pub fn cve_alerts(&self) -> Collection<CveAlert> {
|
|
self.inner.collection("cve_alerts")
|
|
}
|
|
|
|
pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
|
|
self.inner.collection("tracker_issues")
|
|
}
|
|
|
|
// Graph collections
|
|
pub fn graph_nodes(&self) -> Collection<compliance_core::models::graph::CodeNode> {
|
|
self.inner.collection("graph_nodes")
|
|
}
|
|
|
|
pub fn graph_edges(&self) -> Collection<compliance_core::models::graph::CodeEdge> {
|
|
self.inner.collection("graph_edges")
|
|
}
|
|
|
|
pub fn graph_builds(&self) -> Collection<compliance_core::models::graph::GraphBuildRun> {
|
|
self.inner.collection("graph_builds")
|
|
}
|
|
|
|
pub fn impact_analyses(&self) -> Collection<compliance_core::models::graph::ImpactAnalysis> {
|
|
self.inner.collection("impact_analyses")
|
|
}
|
|
|
|
// DAST collections
|
|
pub fn dast_targets(&self) -> Collection<DastTarget> {
|
|
self.inner.collection("dast_targets")
|
|
}
|
|
|
|
pub fn dast_scan_runs(&self) -> Collection<DastScanRun> {
|
|
self.inner.collection("dast_scan_runs")
|
|
}
|
|
|
|
pub fn dast_findings(&self) -> Collection<DastFinding> {
|
|
self.inner.collection("dast_findings")
|
|
}
|
|
|
|
// Embedding collections
|
|
pub fn code_embeddings(&self) -> Collection<compliance_core::models::embedding::CodeEmbedding> {
|
|
self.inner.collection("code_embeddings")
|
|
}
|
|
|
|
pub fn embedding_builds(
|
|
&self,
|
|
) -> Collection<compliance_core::models::embedding::EmbeddingBuildRun> {
|
|
self.inner.collection("embedding_builds")
|
|
}
|
|
|
|
// Pentest collections
|
|
pub fn pentest_sessions(&self) -> Collection<PentestSession> {
|
|
self.inner.collection("pentest_sessions")
|
|
}
|
|
|
|
pub fn attack_chain_nodes(&self) -> Collection<AttackChainNode> {
|
|
self.inner.collection("attack_chain_nodes")
|
|
}
|
|
|
|
pub fn pentest_messages(&self) -> Collection<PentestMessage> {
|
|
self.inner.collection("pentest_messages")
|
|
}
|
|
|
|
#[allow(dead_code)]
|
|
pub fn raw_collection(&self, name: &str) -> Collection<mongodb::bson::Document> {
|
|
self.inner.collection(name)
|
|
}
|
|
|
|
/// Get the raw MongoDB database handle (for graph persistence)
|
|
pub fn inner(&self) -> &mongodb::Database {
|
|
&self.inner
|
|
}
|
|
}
|