All checks were successful
CI / Tests (push) Successful in 5m17s
CI / Detect Changes (push) Successful in 3s
CI / Deploy Agent (push) Successful in 3s
CI / Deploy Dashboard (push) Has been skipped
CI / Deploy Docs (push) Has been skipped
CI / Deploy MCP (push) Has been skipped
CI / Format (push) Successful in 4s
CI / Clippy (push) Successful in 4m38s
CI / Security Audit (push) Successful in 1m50s
Add repo_id, finding_id, and filter fields to tracing::instrument attributes for better trace correlation in SigNoz. Replace all silently swallowed errors (Err(_) => Vec::new()) with tracing::warn! logging across mod.rs, dast.rs, graph.rs handlers. Add stage-level spans with .instrument() to pipeline orchestrator for visibility into scan phases. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
240 lines
6.7 KiB
Rust
240 lines
6.7 KiB
Rust
use std::sync::Arc;
|
|
|
|
use axum::extract::{Extension, Path, Query};
|
|
use axum::http::StatusCode;
|
|
use axum::Json;
|
|
use mongodb::bson::doc;
|
|
use serde::Deserialize;
|
|
|
|
use compliance_core::models::dast::{DastFinding, DastScanRun, DastTarget, DastTargetType};
|
|
|
|
use crate::agent::ComplianceAgent;
|
|
|
|
use super::{collect_cursor_async, ApiResponse, PaginationParams};
|
|
|
|
/// Shared agent handle injected into every handler via axum's extension layer.
type AgentExt = Extension<Arc<ComplianceAgent>>;
|
|
|
|
/// Request body for `POST /api/v1/dast/targets`.
///
/// Only `name` and `base_url` are required; every other field has a serde
/// default so clients can send a minimal payload.
#[derive(Deserialize)]
pub struct AddTargetRequest {
    /// Human-readable target name.
    pub name: String,
    /// Root URL the scanner starts from.
    pub base_url: String,
    /// Kind of target; defaults to `WebApp` (see `default_target_type`).
    #[serde(default = "default_target_type")]
    pub target_type: DastTargetType,
    /// Optional link back to a source repository.
    pub repo_id: Option<String>,
    /// Paths the crawler must not visit; defaults to empty (no exclusions).
    #[serde(default)]
    pub excluded_paths: Vec<String>,
    /// Maximum crawl depth; defaults to 5 (see `default_crawl_depth`).
    #[serde(default = "default_crawl_depth")]
    pub max_crawl_depth: u32,
    /// Request rate limit; defaults to 10 (see `default_rate_limit`).
    /// NOTE(review): unit (requests/sec vs /min) is not visible here — confirm
    /// against the scanner implementation.
    #[serde(default = "default_rate_limit")]
    pub rate_limit: u32,
    /// Whether potentially destructive checks are allowed; defaults to false.
    #[serde(default)]
    pub allow_destructive: bool,
}
|
|
|
|
/// Serde default for `AddTargetRequest::target_type`: treat new targets as
/// web applications unless the client says otherwise.
fn default_target_type() -> DastTargetType {
    DastTargetType::WebApp
}
|
|
/// Serde default for `AddTargetRequest::max_crawl_depth`.
fn default_crawl_depth() -> u32 {
    // Conservative default: deep enough for most apps without runaway crawls.
    const DEFAULT_DEPTH: u32 = 5;
    DEFAULT_DEPTH
}
|
|
/// Serde default for `AddTargetRequest::rate_limit`.
fn default_rate_limit() -> u32 {
    // Gentle default request rate so scans don't hammer the target.
    const DEFAULT_RATE: u32 = 10;
    DEFAULT_RATE
}
|
|
|
|
/// GET /api/v1/dast/targets — List DAST targets
|
|
#[tracing::instrument(skip_all)]
|
|
pub async fn list_targets(
|
|
Extension(agent): AgentExt,
|
|
Query(params): Query<PaginationParams>,
|
|
) -> Result<Json<ApiResponse<Vec<DastTarget>>>, StatusCode> {
|
|
let db = &agent.db;
|
|
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
|
let total = db
|
|
.dast_targets()
|
|
.count_documents(doc! {})
|
|
.await
|
|
.unwrap_or(0);
|
|
|
|
let targets = match db
|
|
.dast_targets()
|
|
.find(doc! {})
|
|
.skip(skip)
|
|
.limit(params.limit)
|
|
.await
|
|
{
|
|
Ok(cursor) => collect_cursor_async(cursor).await,
|
|
Err(e) => {
|
|
tracing::warn!("Failed to fetch DAST targets: {e}");
|
|
Vec::new()
|
|
}
|
|
};
|
|
|
|
Ok(Json(ApiResponse {
|
|
data: targets,
|
|
total: Some(total),
|
|
page: Some(params.page),
|
|
}))
|
|
}
|
|
|
|
/// POST /api/v1/dast/targets — Add a new DAST target
|
|
#[tracing::instrument(skip_all)]
|
|
pub async fn add_target(
|
|
Extension(agent): AgentExt,
|
|
Json(req): Json<AddTargetRequest>,
|
|
) -> Result<Json<ApiResponse<DastTarget>>, StatusCode> {
|
|
let mut target = DastTarget::new(req.name, req.base_url, req.target_type);
|
|
target.repo_id = req.repo_id;
|
|
target.excluded_paths = req.excluded_paths;
|
|
target.max_crawl_depth = req.max_crawl_depth;
|
|
target.rate_limit = req.rate_limit;
|
|
target.allow_destructive = req.allow_destructive;
|
|
|
|
agent
|
|
.db
|
|
.dast_targets()
|
|
.insert_one(&target)
|
|
.await
|
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
|
|
|
Ok(Json(ApiResponse {
|
|
data: target,
|
|
total: None,
|
|
page: None,
|
|
}))
|
|
}
|
|
|
|
/// POST /api/v1/dast/targets/:id/scan — Trigger DAST scan
|
|
#[tracing::instrument(skip_all, fields(target_id = %id))]
|
|
pub async fn trigger_scan(
|
|
Extension(agent): AgentExt,
|
|
Path(id): Path<String>,
|
|
) -> Result<Json<serde_json::Value>, StatusCode> {
|
|
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
|
|
|
let target = agent
|
|
.db
|
|
.dast_targets()
|
|
.find_one(doc! { "_id": oid })
|
|
.await
|
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
|
.ok_or(StatusCode::NOT_FOUND)?;
|
|
|
|
let db = agent.db.clone();
|
|
tokio::spawn(async move {
|
|
let orchestrator = compliance_dast::DastOrchestrator::new(100);
|
|
match orchestrator.run_scan(&target, Vec::new()).await {
|
|
Ok((scan_run, findings)) => {
|
|
if let Err(e) = db.dast_scan_runs().insert_one(&scan_run).await {
|
|
tracing::error!("Failed to store DAST scan run: {e}");
|
|
}
|
|
for finding in &findings {
|
|
if let Err(e) = db.dast_findings().insert_one(finding).await {
|
|
tracing::error!("Failed to store DAST finding: {e}");
|
|
}
|
|
}
|
|
tracing::info!("DAST scan complete: {} findings", findings.len());
|
|
}
|
|
Err(e) => {
|
|
tracing::error!("DAST scan failed: {e}");
|
|
}
|
|
}
|
|
});
|
|
|
|
Ok(Json(serde_json::json!({ "status": "dast_scan_triggered" })))
|
|
}
|
|
|
|
/// GET /api/v1/dast/scan-runs — List DAST scan runs
|
|
#[tracing::instrument(skip_all)]
|
|
pub async fn list_scan_runs(
|
|
Extension(agent): AgentExt,
|
|
Query(params): Query<PaginationParams>,
|
|
) -> Result<Json<ApiResponse<Vec<DastScanRun>>>, StatusCode> {
|
|
let db = &agent.db;
|
|
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
|
let total = db
|
|
.dast_scan_runs()
|
|
.count_documents(doc! {})
|
|
.await
|
|
.unwrap_or(0);
|
|
|
|
let runs = match db
|
|
.dast_scan_runs()
|
|
.find(doc! {})
|
|
.sort(doc! { "started_at": -1 })
|
|
.skip(skip)
|
|
.limit(params.limit)
|
|
.await
|
|
{
|
|
Ok(cursor) => collect_cursor_async(cursor).await,
|
|
Err(e) => {
|
|
tracing::warn!("Failed to fetch DAST scan runs: {e}");
|
|
Vec::new()
|
|
}
|
|
};
|
|
|
|
Ok(Json(ApiResponse {
|
|
data: runs,
|
|
total: Some(total),
|
|
page: Some(params.page),
|
|
}))
|
|
}
|
|
|
|
/// GET /api/v1/dast/findings — List DAST findings
|
|
#[tracing::instrument(skip_all)]
|
|
pub async fn list_findings(
|
|
Extension(agent): AgentExt,
|
|
Query(params): Query<PaginationParams>,
|
|
) -> Result<Json<ApiResponse<Vec<DastFinding>>>, StatusCode> {
|
|
let db = &agent.db;
|
|
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
|
let total = db
|
|
.dast_findings()
|
|
.count_documents(doc! {})
|
|
.await
|
|
.unwrap_or(0);
|
|
|
|
let findings = match db
|
|
.dast_findings()
|
|
.find(doc! {})
|
|
.sort(doc! { "created_at": -1 })
|
|
.skip(skip)
|
|
.limit(params.limit)
|
|
.await
|
|
{
|
|
Ok(cursor) => collect_cursor_async(cursor).await,
|
|
Err(e) => {
|
|
tracing::warn!("Failed to fetch DAST findings: {e}");
|
|
Vec::new()
|
|
}
|
|
};
|
|
|
|
Ok(Json(ApiResponse {
|
|
data: findings,
|
|
total: Some(total),
|
|
page: Some(params.page),
|
|
}))
|
|
}
|
|
|
|
/// GET /api/v1/dast/findings/:id — Finding detail with evidence
|
|
#[tracing::instrument(skip_all, fields(finding_id = %id))]
|
|
pub async fn get_finding(
|
|
Extension(agent): AgentExt,
|
|
Path(id): Path<String>,
|
|
) -> Result<Json<ApiResponse<DastFinding>>, StatusCode> {
|
|
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
|
|
|
let finding = agent
|
|
.db
|
|
.dast_findings()
|
|
.find_one(doc! { "_id": oid })
|
|
.await
|
|
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
|
.ok_or(StatusCode::NOT_FOUND)?;
|
|
|
|
Ok(Json(ApiResponse {
|
|
data: finding,
|
|
total: None,
|
|
page: None,
|
|
}))
|
|
}
|