feat: AI-driven automated penetration testing (#12)
Some checks failed
CI / Clippy (push) Failing after 1m51s
CI / Security Audit (push) Successful in 2m1s
CI / Tests (push) Has been skipped
CI / Detect Changes (push) Has been skipped
CI / Deploy Agent (push) Has been skipped
CI / Deploy Dashboard (push) Has been skipped
CI / Deploy Docs (push) Has been skipped
CI / Format (push) Failing after 42s
CI / Deploy MCP (push) Has been skipped

This commit was merged in pull request #12.
This commit was committed on 2026-03-12 14:42:54 +00:00.
parent 3ec1456b0d
commit acc5b86aa4
52 changed files with 11729 additions and 98 deletions

View File

@@ -0,0 +1,285 @@
use std::time::Instant;
use compliance_core::error::CoreError;
use compliance_core::models::dast::{DastEvidence, DastFinding, DastVulnType};
use compliance_core::models::Severity;
use compliance_core::traits::pentest_tool::{PentestTool, PentestToolContext, PentestToolResult};
use serde_json::json;
use tracing::{info, warn};
/// Tool that tests whether a target enforces rate limiting.
///
/// Sends a burst of sequential requests at a single endpoint and inspects the
/// responses for HTTP 429 status codes; also measures response times so the
/// caller can see whether latency degrades under load.
pub struct RateLimitTesterTool {
/// Shared HTTP client used for every probe request.
http: reqwest::Client,
}
impl RateLimitTesterTool {
pub fn new(http: reqwest::Client) -> Self {
Self { http }
}
}
impl PentestTool for RateLimitTesterTool {
fn name(&self) -> &str {
"rate_limit_tester"
}
fn description(&self) -> &str {
"Tests whether an endpoint enforces rate limiting by sending rapid sequential requests. \
Checks for 429 responses and measures response time degradation."
}
fn input_schema(&self) -> serde_json::Value {
json!({
"type": "object",
"properties": {
"url": {
"type": "string",
"description": "URL of the endpoint to test for rate limiting"
},
"method": {
"type": "string",
"description": "HTTP method to use",
"enum": ["GET", "POST", "PUT", "PATCH", "DELETE"],
"default": "GET"
},
"request_count": {
"type": "integer",
"description": "Number of rapid requests to send (default: 50)",
"default": 50,
"minimum": 10,
"maximum": 200
},
"body": {
"type": "string",
"description": "Optional request body (for POST/PUT/PATCH)"
}
},
"required": ["url"]
})
}
fn execute<'a>(
&'a self,
input: serde_json::Value,
context: &'a PentestToolContext,
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PentestToolResult, CoreError>> + Send + 'a>> {
Box::pin(async move {
let url = input
.get("url")
.and_then(|v| v.as_str())
.ok_or_else(|| CoreError::Dast("Missing required 'url' parameter".to_string()))?;
let method = input
.get("method")
.and_then(|v| v.as_str())
.unwrap_or("GET");
let request_count = input
.get("request_count")
.and_then(|v| v.as_u64())
.unwrap_or(50)
.min(200) as usize;
let body = input.get("body").and_then(|v| v.as_str());
let target_id = context
.target
.id
.map(|oid| oid.to_hex())
.unwrap_or_else(|| "unknown".to_string());
// Respect the context rate limit if set
let max_requests = if context.rate_limit > 0 {
request_count.min(context.rate_limit as usize * 5)
} else {
request_count
};
let mut status_codes: Vec<u16> = Vec::with_capacity(max_requests);
let mut response_times: Vec<u128> = Vec::with_capacity(max_requests);
let mut got_429 = false;
let mut rate_limit_at_request: Option<usize> = None;
for i in 0..max_requests {
let start = Instant::now();
let request = match method {
"POST" => {
let mut req = self.http.post(url);
if let Some(b) = body {
req = req.body(b.to_string());
}
req
}
"PUT" => {
let mut req = self.http.put(url);
if let Some(b) = body {
req = req.body(b.to_string());
}
req
}
"PATCH" => {
let mut req = self.http.patch(url);
if let Some(b) = body {
req = req.body(b.to_string());
}
req
}
"DELETE" => self.http.delete(url),
_ => self.http.get(url),
};
match request.send().await {
Ok(resp) => {
let elapsed = start.elapsed().as_millis();
let status = resp.status().as_u16();
status_codes.push(status);
response_times.push(elapsed);
if status == 429 && !got_429 {
got_429 = true;
rate_limit_at_request = Some(i + 1);
info!(url, request_num = i + 1, "Rate limit triggered (429)");
}
// Check for rate limit headers even on 200
if !got_429 {
let headers = resp.headers();
let has_rate_headers = headers.contains_key("x-ratelimit-limit")
|| headers.contains_key("x-ratelimit-remaining")
|| headers.contains_key("ratelimit-limit")
|| headers.contains_key("ratelimit-remaining")
|| headers.contains_key("retry-after");
if has_rate_headers && rate_limit_at_request.is_none() {
// Server has rate limit headers but hasn't blocked yet
}
}
}
Err(e) => {
let elapsed = start.elapsed().as_millis();
status_codes.push(0);
response_times.push(elapsed);
}
}
}
let mut findings = Vec::new();
let total_sent = status_codes.len();
let count_429 = status_codes.iter().filter(|&&s| s == 429).count();
let count_success = status_codes.iter().filter(|&&s| (200..300).contains(&s)).count();
// Calculate response time statistics
let avg_time = if !response_times.is_empty() {
response_times.iter().sum::<u128>() / response_times.len() as u128
} else {
0
};
let first_half_avg = if response_times.len() >= 4 {
let half = response_times.len() / 2;
response_times[..half].iter().sum::<u128>() / half as u128
} else {
avg_time
};
let second_half_avg = if response_times.len() >= 4 {
let half = response_times.len() / 2;
response_times[half..].iter().sum::<u128>() / (response_times.len() - half) as u128
} else {
avg_time
};
// Significant time degradation suggests possible (weak) rate limiting
let time_degradation = if first_half_avg > 0 {
(second_half_avg as f64 / first_half_avg as f64) - 1.0
} else {
0.0
};
let rate_data = json!({
"total_requests_sent": total_sent,
"status_429_count": count_429,
"success_count": count_success,
"rate_limit_at_request": rate_limit_at_request,
"avg_response_time_ms": avg_time,
"first_half_avg_ms": first_half_avg,
"second_half_avg_ms": second_half_avg,
"time_degradation_pct": (time_degradation * 100.0).round(),
});
if !got_429 && count_success == total_sent {
// No rate limiting detected at all
let evidence = DastEvidence {
request_method: method.to_string(),
request_url: url.to_string(),
request_headers: None,
request_body: body.map(String::from),
response_status: 200,
response_headers: None,
response_snippet: Some(format!(
"Sent {total_sent} rapid requests. All returned success (2xx). \
No 429 responses received. Avg response time: {avg_time}ms."
)),
screenshot_path: None,
payload: None,
response_time_ms: Some(avg_time as u64),
};
let mut finding = DastFinding::new(
String::new(),
target_id.clone(),
DastVulnType::RateLimitAbsent,
format!("No rate limiting on {} {}", method, url),
format!(
"The endpoint {} {} does not enforce rate limiting. \
{total_sent} rapid requests were all accepted with no 429 responses \
or noticeable degradation. This makes the endpoint vulnerable to \
brute force attacks and abuse.",
method, url
),
Severity::Medium,
url.to_string(),
method.to_string(),
);
finding.cwe = Some("CWE-770".to_string());
finding.evidence = vec![evidence];
finding.remediation = Some(
"Implement rate limiting on this endpoint. Use token bucket or sliding window \
algorithms. Return 429 Too Many Requests with a Retry-After header when the \
limit is exceeded."
.to_string(),
);
findings.push(finding);
warn!(url, method, total_sent, "No rate limiting detected");
} else if got_429 {
info!(
url,
method,
rate_limit_at = ?rate_limit_at_request,
"Rate limiting is enforced"
);
}
let count = findings.len();
Ok(PentestToolResult {
summary: if got_429 {
format!(
"Rate limiting is enforced on {method} {url}. \
429 response received after {} requests.",
rate_limit_at_request.unwrap_or(0)
)
} else if count > 0 {
format!(
"No rate limiting detected on {method} {url} after {total_sent} requests."
)
} else {
format!("Rate limit testing complete for {method} {url}.")
},
findings,
data: rate_data,
})
})
}
}