Run cargo fmt across all crates
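Presumably produced with the workspace-wide formatter invocation (a sketch; the exact command isn't recorded in the commit):

    cargo fmt --all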
Some checks failed
CI / Format (push) Successful in 2s
CI / Clippy (push) Failing after 1m23s
CI / Security Audit (push) Has been skipped
CI / Tests (push) Has been skipped
CI / Clippy (pull_request) Failing after 1m18s
CI / Security Audit (pull_request) Has been skipped
CI / Tests (pull_request) Has been skipped
CI / Format (pull_request) Successful in 3s
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
@@ -103,8 +103,7 @@ pub async fn trigger_scan(
     Extension(agent): AgentExt,
     Path(id): Path<String>,
 ) -> Result<Json<serde_json::Value>, StatusCode> {
-    let oid =
-        mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
+    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
 
     let target = agent
         .db
@@ -207,8 +206,7 @@ pub async fn get_finding(
     Extension(agent): AgentExt,
     Path(id): Path<String>,
 ) -> Result<Json<ApiResponse<DastFinding>>, StatusCode> {
-    let oid =
-        mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
+    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
 
     let finding = agent
         .db
@@ -235,12 +235,7 @@ pub async fn get_file_content(
     // Cap at 10,000 lines
     let truncated: String = content.lines().take(10_000).collect::<Vec<_>>().join("\n");
 
-    let language = params
-        .path
-        .rsplit('.')
-        .next()
-        .unwrap_or("")
-        .to_string();
+    let language = params.path.rsplit('.').next().unwrap_or("").to_string();
 
     Ok(Json(ApiResponse {
         data: FileContent {
@@ -185,7 +185,9 @@ impl PipelineOrchestrator {
         // Stage 4.5: Graph Building
         tracing::info!("[{repo_id}] Stage 4.5: Graph Building");
         self.update_phase(scan_run_id, "graph_building").await;
-        let graph_context = match self.build_code_graph(&repo_path, &repo_id, &all_findings).await
+        let graph_context = match self
+            .build_code_graph(&repo_path, &repo_id, &all_findings)
+            .await
         {
             Ok(ctx) => Some(ctx),
             Err(e) => {
@@ -296,9 +298,10 @@ impl PipelineOrchestrator {
         let graph_build_id = uuid::Uuid::new_v4().to_string();
         let engine = compliance_graph::GraphEngine::new(50_000);
 
-        let (mut code_graph, build_run) = engine
-            .build_graph(repo_path, repo_id, &graph_build_id)
-            .map_err(|e| AgentError::Other(format!("Graph build error: {e}")))?;
+        let (mut code_graph, build_run) =
+            engine
+                .build_graph(repo_path, repo_id, &graph_build_id)
+                .map_err(|e| AgentError::Other(format!("Graph build error: {e}")))?;
 
         // Apply community detection
         compliance_graph::graph::community::apply_communities(&mut code_graph);
@@ -348,15 +351,11 @@ impl PipelineOrchestrator {
         use futures_util::TryStreamExt;
 
         let filter = mongodb::bson::doc! { "repo_id": repo_id };
-        let targets: Vec<compliance_core::models::DastTarget> = match self
-            .db
-            .dast_targets()
-            .find(filter)
-            .await
-        {
-            Ok(cursor) => cursor.try_collect().await.unwrap_or_default(),
-            Err(_) => return,
-        };
+        let targets: Vec<compliance_core::models::DastTarget> =
+            match self.db.dast_targets().find(filter).await {
+                Ok(cursor) => cursor.try_collect().await.unwrap_or_default(),
+                Err(_) => return,
+            };
 
         if targets.is_empty() {
             tracing::info!("[{repo_id}] No DAST targets configured, skipping");
@@ -379,10 +378,7 @@ impl PipelineOrchestrator {
                         tracing::error!("Failed to store DAST finding: {e}");
                     }
                 }
-                tracing::info!(
-                    "DAST scan complete: {} findings",
-                    findings.len()
-                );
+                tracing::info!("DAST scan complete: {} findings", findings.len());
             }
             Err(e) => {
                 tracing::error!("DAST scan failed: {e}");
@@ -31,9 +31,15 @@ pub struct TrackedRepository {
     pub last_scanned_commit: Option<String>,
     #[serde(default, deserialize_with = "deserialize_findings_count")]
    pub findings_count: u32,
-    #[serde(default = "chrono::Utc::now", deserialize_with = "deserialize_datetime")]
+    #[serde(
+        default = "chrono::Utc::now",
+        deserialize_with = "deserialize_datetime"
+    )]
     pub created_at: DateTime<Utc>,
-    #[serde(default = "chrono::Utc::now", deserialize_with = "deserialize_datetime")]
+    #[serde(
+        default = "chrono::Utc::now",
+        deserialize_with = "deserialize_datetime"
+    )]
     pub updated_at: DateTime<Utc>,
 }
 
@@ -51,9 +57,7 @@ where
     let bson = bson::Bson::deserialize(deserializer)?;
     match bson {
         bson::Bson::DateTime(dt) => Ok(dt.into()),
-        bson::Bson::String(s) => s
-            .parse::<DateTime<Utc>>()
-            .map_err(serde::de::Error::custom),
+        bson::Bson::String(s) => s.parse::<DateTime<Utc>>().map_err(serde::de::Error::custom),
         other => Err(serde::de::Error::custom(format!(
             "expected DateTime or string, got: {other:?}"
         ))),
@@ -47,17 +47,19 @@ fn insert_path(
     let name = parts[0].to_string();
     let is_leaf = parts.len() == 1;
 
-    let entry = children.entry(name.clone()).or_insert_with(|| FileTreeNode {
-        name: name.clone(),
-        path: if is_leaf {
-            full_path.to_string()
-        } else {
-            String::new()
-        },
-        is_dir: !is_leaf,
-        node_count: 0,
-        children: Vec::new(),
-    });
+    let entry = children
+        .entry(name.clone())
+        .or_insert_with(|| FileTreeNode {
+            name: name.clone(),
+            path: if is_leaf {
+                full_path.to_string()
+            } else {
+                String::new()
+            },
+            is_dir: !is_leaf,
+            node_count: 0,
+            children: Vec::new(),
+        });
 
     if is_leaf {
         entry.node_count = node_count;
@@ -39,11 +39,11 @@ impl Toasts {
 
         #[cfg(feature = "web")]
         {
             let mut items = self.items;
             spawn(async move {
                 gloo_timers::future::TimeoutFuture::new(4_000).await;
                 items.write().retain(|t| t.id != id);
             });
         }
     }
 
@@ -87,10 +87,7 @@ pub async fn fetch_dast_finding_detail(
 }
 
 #[server]
-pub async fn add_dast_target(
-    name: String,
-    base_url: String,
-) -> Result<(), ServerFnError> {
+pub async fn add_dast_target(name: String, base_url: String) -> Result<(), ServerFnError> {
     let state: super::server_state::ServerState =
         dioxus_fullstack::FullstackContext::extract().await?;
     let url = format!("{}/api/v1/dast/targets", state.agent_api_url);
@@ -121,10 +121,7 @@ pub async fn fetch_file_content(
 }
 
 #[server]
-pub async fn search_nodes(
-    repo_id: String,
-    query: String,
-) -> Result<SearchResponse, ServerFnError> {
+pub async fn search_nodes(repo_id: String, query: String) -> Result<SearchResponse, ServerFnError> {
     let state: super::server_state::ServerState =
         dioxus_fullstack::FullstackContext::extract().await?;
     let url = format!(
@@ -14,7 +14,9 @@ pub fn FindingsPage() -> Element {
     let mut repo_filter = use_signal(String::new);
 
     let repos = use_resource(|| async {
-        crate::infrastructure::repositories::fetch_repositories(1).await.ok()
+        crate::infrastructure::repositories::fetch_repositories(1)
+            .await
+            .ok()
     });
 
     let findings = use_resource(move || {
@@ -234,10 +234,7 @@ impl ApiFuzzerAgent {
             .ok()?;
 
         let headers = response.headers();
-        let acao = headers
-            .get("access-control-allow-origin")?
-            .to_str()
-            .ok()?;
+        let acao = headers.get("access-control-allow-origin")?.to_str().ok()?;
 
         if acao == "*" || acao == "https://evil.com" {
             let acac = headers
@@ -265,12 +262,9 @@ impl ApiFuzzerAgent {
                 request_body: None,
                 response_status: response.status().as_u16(),
                 response_headers: Some(
-                    [(
-                        "Access-Control-Allow-Origin".to_string(),
-                        acao.to_string(),
-                    )]
-                    .into_iter()
-                    .collect(),
+                    [("Access-Control-Allow-Origin".to_string(), acao.to_string())]
+                        .into_iter()
+                        .collect(),
                 ),
                 response_snippet: None,
                 screenshot_path: None,
@@ -132,7 +132,10 @@ impl DastAgent for AuthBypassAgent {
                     String::new(),
                     target_id.clone(),
                     DastVulnType::AuthBypass,
-                    format!("HTTP method tampering: {} accepted on {}", method, endpoint.url),
+                    format!(
+                        "HTTP method tampering: {} accepted on {}",
+                        method, endpoint.url
+                    ),
                     format!(
                         "Endpoint {} accepts {} requests which may bypass access controls.",
                         endpoint.url, method
@@ -20,10 +20,7 @@ impl SsrfAgent {
             ("http://[::1]", "localhost IPv6"),
             ("http://0.0.0.0", "zero address"),
             ("http://169.254.169.254/latest/meta-data/", "AWS metadata"),
-            (
-                "http://metadata.google.internal/",
-                "GCP metadata",
-            ),
+            ("http://metadata.google.internal/", "GCP metadata"),
             ("http://127.0.0.1:22", "SSH port probe"),
             ("http://127.0.0.1:3306", "MySQL port probe"),
             ("http://localhost/admin", "localhost admin"),
@@ -91,10 +88,7 @@ impl DastAgent for SsrfAgent {
                         .post(&endpoint.url)
                         .form(&[(param.name.as_str(), payload)])
                 } else {
-                    let test_url = format!(
-                        "{}?{}={}",
-                        endpoint.url, param.name, payload
-                    );
+                    let test_url = format!("{}?{}={}", endpoint.url, param.name, payload);
                     self.http.get(&test_url)
                 };
 
@@ -133,10 +127,7 @@ impl DastAgent for SsrfAgent {
                         String::new(),
                         target_id.clone(),
                         DastVulnType::Ssrf,
-                        format!(
-                            "SSRF ({technique}) via parameter '{}'",
-                            param.name
-                        ),
+                        format!("SSRF ({technique}) via parameter '{}'", param.name),
                         format!(
                             "Server-side request forgery detected in parameter '{}' at {}. \
                              The application made a request to an internal resource ({}).",
@@ -17,26 +17,11 @@ impl XssAgent {
     fn payloads(&self) -> Vec<(&str, &str)> {
         vec![
             ("<script>alert(1)</script>", "basic script injection"),
-            (
-                "<img src=x onerror=alert(1)>",
-                "event handler injection",
-            ),
-            (
-                "<svg/onload=alert(1)>",
-                "svg event handler",
-            ),
-            (
-                "javascript:alert(1)",
-                "javascript protocol",
-            ),
-            (
-                "'\"><script>alert(1)</script>",
-                "attribute breakout",
-            ),
-            (
-                "<body onload=alert(1)>",
-                "body event handler",
-            ),
+            ("<img src=x onerror=alert(1)>", "event handler injection"),
+            ("<svg/onload=alert(1)>", "svg event handler"),
+            ("javascript:alert(1)", "javascript protocol"),
+            ("'\"><script>alert(1)</script>", "attribute breakout"),
+            ("<body onload=alert(1)>", "body event handler"),
         ]
     }
 }
@@ -65,10 +50,7 @@ impl DastAgent for XssAgent {
         for param in &endpoint.parameters {
             for (payload, technique) in self.payloads() {
                 let test_url = if endpoint.method == "GET" {
-                    format!(
-                        "{}?{}={}",
-                        endpoint.url, param.name, payload
-                    )
+                    format!("{}?{}={}", endpoint.url, param.name, payload)
                 } else {
                     endpoint.url.clone()
                 };
@@ -28,8 +28,8 @@ impl WebCrawler {
         base_url: &str,
         excluded_paths: &[String],
     ) -> Result<Vec<DiscoveredEndpoint>, CoreError> {
-        let base = Url::parse(base_url)
-            .map_err(|e| CoreError::Dast(format!("Invalid base URL: {e}")))?;
+        let base =
+            Url::parse(base_url).map_err(|e| CoreError::Dast(format!("Invalid base URL: {e}")))?;
 
         let mut visited: HashSet<String> = HashSet::new();
         let mut endpoints: Vec<DiscoveredEndpoint> = Vec::new();
@@ -95,12 +95,13 @@ impl WebCrawler {
             let document = Html::parse_document(&body);
 
             // Extract links
-            let link_selector =
-                Selector::parse("a[href]").unwrap_or_else(|_| Selector::parse("a").expect("valid selector"));
+            let link_selector = Selector::parse("a[href]")
+                .unwrap_or_else(|_| Selector::parse("a").expect("valid selector"));
             for element in document.select(&link_selector) {
                 if let Some(href) = element.value().attr("href") {
                     if let Some(absolute_url) = self.resolve_url(&base, &url, href) {
-                        if self.is_same_origin(&base, &absolute_url) && !visited.contains(&absolute_url)
+                        if self.is_same_origin(&base, &absolute_url)
+                            && !visited.contains(&absolute_url)
                         {
                             queue.push((absolute_url, depth + 1));
                         }
@@ -116,11 +117,7 @@ impl WebCrawler {
 
             for form in document.select(&form_selector) {
                 let action = form.value().attr("action").unwrap_or("");
-                let method = form
-                    .value()
-                    .attr("method")
-                    .unwrap_or("GET")
-                    .to_uppercase();
+                let method = form.value().attr("method").unwrap_or("GET").to_uppercase();
 
                 let form_url = self
                     .resolve_url(&base, &url, action)
@@ -128,20 +125,12 @@ impl WebCrawler {
 
                 let mut params = Vec::new();
                 for input in form.select(&input_selector) {
-                    let name = input
-                        .value()
-                        .attr("name")
-                        .unwrap_or("")
-                        .to_string();
+                    let name = input.value().attr("name").unwrap_or("").to_string();
                     if name.is_empty() {
                         continue;
                     }
 
-                    let input_type = input
-                        .value()
-                        .attr("type")
-                        .unwrap_or("text")
-                        .to_string();
+                    let input_type = input.value().attr("type").unwrap_or("text").to_string();
 
                     let location = if method == "GET" {
                         "query".to_string()
@@ -149,11 +149,8 @@ impl DastOrchestrator {
         let t2 = target.clone();
         let c2 = context.clone();
         let h2 = http.clone();
-        let xss_handle = tokio::spawn(async move {
-            crate::agents::xss::XssAgent::new(h2)
-                .run(&t2, &c2)
-                .await
-        });
+        let xss_handle =
+            tokio::spawn(async move { crate::agents::xss::XssAgent::new(h2).run(&t2, &c2).await });
 
         let t3 = target.clone();
         let c3 = context.clone();
@@ -167,11 +164,10 @@ impl DastOrchestrator {
         let t4 = target.clone();
         let c4 = context.clone();
         let h4 = http.clone();
-        let ssrf_handle = tokio::spawn(async move {
-            crate::agents::ssrf::SsrfAgent::new(h4)
-                .run(&t4, &c4)
-                .await
-        });
+        let ssrf_handle =
+            tokio::spawn(
+                async move { crate::agents::ssrf::SsrfAgent::new(h4).run(&t4, &c4).await },
+            );
 
         let t5 = target.clone();
         let c5 = context.clone();
@@ -182,8 +178,13 @@ impl DastOrchestrator {
                 .await
         });
 
-        let handles: Vec<tokio::task::JoinHandle<Result<Vec<DastFinding>, CoreError>>> =
-            vec![sqli_handle, xss_handle, auth_handle, ssrf_handle, api_handle];
+        let handles: Vec<tokio::task::JoinHandle<Result<Vec<DastFinding>, CoreError>>> = vec![
+            sqli_handle,
+            xss_handle,
+            auth_handle,
+            ssrf_handle,
+            api_handle,
+        ];
 
         let mut all_findings = Vec::new();
         for handle in handles {
@@ -81,10 +81,9 @@ impl ReconAgent {
         ];
         for header in &missing_security {
             if !headers.contains_key(*header) {
-                result.interesting_headers.insert(
-                    format!("missing:{header}"),
-                    "Not present".to_string(),
-                );
+                result
+                    .interesting_headers
+                    .insert(format!("missing:{header}"), "Not present".to_string());
             }
         }
 
@@ -26,8 +26,11 @@ impl<'a> ImpactAnalyzer<'a> {
         file_path: &str,
         line_number: Option<u32>,
     ) -> ImpactAnalysis {
-        let mut analysis =
-            ImpactAnalysis::new(repo_id.to_string(), finding_id.to_string(), graph_build_id.to_string());
+        let mut analysis = ImpactAnalysis::new(
+            repo_id.to_string(),
+            finding_id.to_string(),
+            graph_build_id.to_string(),
+        );
 
         // Find the node containing the finding
         let target_node = self.find_node_at_location(file_path, line_number);
@@ -97,7 +100,11 @@ impl<'a> ImpactAnalyzer<'a> {
     }
 
     /// Find the graph node at a given file/line location
-    fn find_node_at_location(&self, file_path: &str, line_number: Option<u32>) -> Option<NodeIndex> {
+    fn find_node_at_location(
+        &self,
+        file_path: &str,
+        line_number: Option<u32>,
+    ) -> Option<NodeIndex> {
         let mut best: Option<(NodeIndex, u32)> = None; // (index, line_span)
 
         for node in &self.code_graph.nodes {
@@ -166,12 +173,7 @@ impl<'a> ImpactAnalyzer<'a> {
     }
 
     /// Find a path from source to target (BFS, limited depth)
-    fn find_path(
-        &self,
-        from: NodeIndex,
-        to: NodeIndex,
-        max_depth: usize,
-    ) -> Option<Vec<String>> {
+    fn find_path(&self, from: NodeIndex, to: NodeIndex, max_depth: usize) -> Option<Vec<String>> {
         let mut visited = HashSet::new();
         let mut queue: VecDeque<(NodeIndex, Vec<NodeIndex>)> = VecDeque::new();
         queue.push_back((from, vec![from]));
@@ -209,7 +211,10 @@ impl<'a> ImpactAnalyzer<'a> {
         None
     }
 
-    fn get_node_by_index(&self, idx: NodeIndex) -> Option<&compliance_core::models::graph::CodeNode> {
+    fn get_node_by_index(
+        &self,
+        idx: NodeIndex,
+    ) -> Option<&compliance_core::models::graph::CodeNode> {
         let target_gi = idx.index() as u32;
         self.code_graph
             .nodes
@@ -211,8 +211,6 @@ impl GraphStore {
         repo_id: &str,
         graph_build_id: &str,
     ) -> Result<Vec<CommunityInfo>, CoreError> {
-
-
         let filter = doc! {
             "repo_id": repo_id,
             "graph_build_id": graph_build_id,
@@ -51,7 +51,13 @@ impl JavaScriptParser {
 
         if let Some(body) = node.child_by_field_name("body") {
             self.extract_calls(
-                body, source, file_path, repo_id, graph_build_id, &qualified, output,
+                body,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                &qualified,
+                output,
             );
         }
     }
@@ -97,7 +103,12 @@ impl JavaScriptParser {
 
         if let Some(body) = node.child_by_field_name("body") {
             self.walk_children(
-                body, source, file_path, repo_id, graph_build_id, Some(&qualified),
+                body,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                Some(&qualified),
                 output,
             );
         }
@@ -130,7 +141,13 @@ impl JavaScriptParser {
 
         if let Some(body) = node.child_by_field_name("body") {
             self.extract_calls(
-                body, source, file_path, repo_id, graph_build_id, &qualified, output,
+                body,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                &qualified,
+                output,
             );
         }
     }
@@ -138,7 +155,13 @@ impl JavaScriptParser {
             // Arrow functions assigned to variables: const foo = () => {}
             "lexical_declaration" | "variable_declaration" => {
                 self.extract_arrow_functions(
-                    node, source, file_path, repo_id, graph_build_id, parent_qualified, output,
+                    node,
+                    source,
+                    file_path,
+                    repo_id,
+                    graph_build_id,
+                    parent_qualified,
+                    output,
                 );
             }
             "import_statement" => {
@@ -183,7 +206,13 @@ impl JavaScriptParser {
         let mut cursor = node.walk();
         for child in node.children(&mut cursor) {
             self.walk_tree(
-                child, source, file_path, repo_id, graph_build_id, parent_qualified, output,
+                child,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                parent_qualified,
+                output,
             );
         }
     }
@@ -217,7 +246,13 @@ impl JavaScriptParser {
         let mut cursor = node.walk();
         for child in node.children(&mut cursor) {
             self.extract_calls(
-                child, source, file_path, repo_id, graph_build_id, caller_qualified, output,
+                child,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                caller_qualified,
+                output,
             );
         }
     }
@@ -263,7 +298,12 @@ impl JavaScriptParser {
 
                 if let Some(body) = value_n.child_by_field_name("body") {
                     self.extract_calls(
-                        body, source, file_path, repo_id, graph_build_id, &qualified,
+                        body,
+                        source,
+                        file_path,
+                        repo_id,
+                        graph_build_id,
+                        &qualified,
                         output,
                     );
                 }
@@ -57,10 +57,7 @@ impl ParserRegistry {
         repo_id: &str,
         graph_build_id: &str,
     ) -> Result<Option<ParseOutput>, CoreError> {
-        let ext = file_path
-            .extension()
-            .and_then(|e| e.to_str())
-            .unwrap_or("");
+        let ext = file_path.extension().and_then(|e| e.to_str()).unwrap_or("");
 
         let parser_idx = match self.extension_map.get(ext) {
             Some(idx) => *idx,
@@ -89,7 +86,15 @@ impl ParserRegistry {
         let mut combined = ParseOutput::default();
         let mut node_count: u32 = 0;
 
-        self.walk_directory(dir, dir, repo_id, graph_build_id, max_nodes, &mut node_count, &mut combined)?;
+        self.walk_directory(
+            dir,
+            dir,
+            repo_id,
+            graph_build_id,
+            max_nodes,
+            &mut node_count,
+            &mut combined,
+        )?;
 
         info!(
             nodes = combined.nodes.len(),
@@ -162,8 +167,7 @@ impl ParserRegistry {
                 Err(_) => continue, // Skip binary/unreadable files
             };
 
-            if let Some(output) = self.parse_file(rel_path, &source, repo_id, graph_build_id)?
-            {
+            if let Some(output) = self.parse_file(rel_path, &source, repo_id, graph_build_id)? {
                 *node_count += output.nodes.len() as u32;
                 combined.nodes.extend(output.nodes);
                 combined.edges.extend(output.edges);
@@ -196,9 +196,7 @@ impl RustParser {
                     id: None,
                     repo_id: repo_id.to_string(),
                     graph_build_id: graph_build_id.to_string(),
-                    source: parent_qualified
-                        .unwrap_or(file_path)
-                        .to_string(),
+                    source: parent_qualified.unwrap_or(file_path).to_string(),
                     target: path,
                     kind: CodeEdgeKind::Imports,
                     file_path: file_path.to_string(),
@@ -354,10 +352,7 @@ impl RustParser {
 
     fn extract_use_path(&self, use_text: &str) -> Option<String> {
         // "use foo::bar::baz;" -> "foo::bar::baz"
-        let trimmed = use_text
-            .strip_prefix("use ")?
-            .trim_end_matches(';')
-            .trim();
+        let trimmed = use_text.strip_prefix("use ")?.trim_end_matches(';').trim();
         Some(trimmed.to_string())
     }
 }
@@ -49,7 +49,13 @@ impl TypeScriptParser {
 
         if let Some(body) = node.child_by_field_name("body") {
             self.extract_calls(
-                body, source, file_path, repo_id, graph_build_id, &qualified, output,
+                body,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                &qualified,
+                output,
             );
         }
     }
@@ -80,12 +86,23 @@ impl TypeScriptParser {
 
                 // Heritage clause (extends/implements)
                 self.extract_heritage(
-                    &node, source, file_path, repo_id, graph_build_id, &qualified, output,
+                    &node,
+                    source,
+                    file_path,
+                    repo_id,
+                    graph_build_id,
+                    &qualified,
+                    output,
                 );
 
                 if let Some(body) = node.child_by_field_name("body") {
                     self.walk_children(
-                        body, source, file_path, repo_id, graph_build_id, Some(&qualified),
+                        body,
+                        source,
+                        file_path,
+                        repo_id,
+                        graph_build_id,
+                        Some(&qualified),
                         output,
                     );
                 }
@@ -143,14 +160,26 @@ impl TypeScriptParser {
 
                     if let Some(body) = node.child_by_field_name("body") {
                         self.extract_calls(
-                            body, source, file_path, repo_id, graph_build_id, &qualified, output,
+                            body,
+                            source,
+                            file_path,
+                            repo_id,
+                            graph_build_id,
+                            &qualified,
+                            output,
                         );
                     }
                 }
             }
             "lexical_declaration" | "variable_declaration" => {
                 self.extract_arrow_functions(
-                    node, source, file_path, repo_id, graph_build_id, parent_qualified, output,
+                    node,
+                    source,
+                    file_path,
+                    repo_id,
+                    graph_build_id,
+                    parent_qualified,
+                    output,
                 );
             }
             "import_statement" => {
@@ -172,7 +201,13 @@ impl TypeScriptParser {
             }
 
             self.walk_children(
-                node, source, file_path, repo_id, graph_build_id, parent_qualified, output,
+                node,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                parent_qualified,
+                output,
             );
         }
 
@@ -189,7 +224,13 @@ impl TypeScriptParser {
         let mut cursor = node.walk();
         for child in node.children(&mut cursor) {
             self.walk_tree(
-                child, source, file_path, repo_id, graph_build_id, parent_qualified, output,
+                child,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                parent_qualified,
+                output,
             );
         }
     }
@@ -223,7 +264,13 @@ impl TypeScriptParser {
         let mut cursor = node.walk();
         for child in node.children(&mut cursor) {
             self.extract_calls(
-                child, source, file_path, repo_id, graph_build_id, caller_qualified, output,
+                child,
+                source,
+                file_path,
+                repo_id,
+                graph_build_id,
+                caller_qualified,
+                output,
             );
         }
     }
@@ -269,7 +316,12 @@ impl TypeScriptParser {
 
                 if let Some(body) = value_n.child_by_field_name("body") {
                     self.extract_calls(
-                        body, source, file_path, repo_id, graph_build_id, &qualified,
+                        body,
+                        source,
+                        file_path,
+                        repo_id,
+                        graph_build_id,
+                        &qualified,
                         output,
                     );
                 }
@@ -89,8 +89,10 @@ impl SymbolIndex {
             .map_err(|e| CoreError::Graph(format!("Failed to create reader: {e}")))?;
 
         let searcher = reader.searcher();
-        let query_parser =
-            QueryParser::for_index(&self.index, vec![self.name_field, self.qualified_name_field]);
+        let query_parser = QueryParser::for_index(
+            &self.index,
+            vec![self.name_field, self.qualified_name_field],
+        );
 
         let query = query_parser
             .parse_query(query_str)