feat: per-repo issue tracker, Gitea support, PR review pipeline (#10)
Some checks failed
CI / Security Audit (push) Has been cancelled
CI / Tests (push) Has been cancelled
CI / Detect Changes (push) Has been cancelled
CI / Deploy Agent (push) Has been cancelled
CI / Deploy Dashboard (push) Has been cancelled
CI / Deploy Docs (push) Has been cancelled
CI / Deploy MCP (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Format (push) Successful in 4s

This commit was merged in pull request #10.
This commit is contained in:
2026-03-11 12:13:59 +00:00
parent be4b43ed64
commit 491665559f
22 changed files with 1582 additions and 122 deletions

View File

@@ -29,7 +29,7 @@ jobs:
name: Format
runs-on: docker
container:
image: rust:1.89-bookworm
image: rust:1.94-bookworm
steps:
- name: Checkout
run: |
@@ -47,7 +47,7 @@ jobs:
name: Clippy
runs-on: docker
container:
image: rust:1.89-bookworm
image: rust:1.94-bookworm
steps:
- name: Checkout
run: |
@@ -81,7 +81,7 @@ jobs:
runs-on: docker
if: github.ref == 'refs/heads/main'
container:
image: rust:1.89-bookworm
image: rust:1.94-bookworm
steps:
- name: Checkout
run: |
@@ -104,7 +104,7 @@ jobs:
runs-on: docker
needs: [fmt, clippy, audit]
container:
image: rust:1.89-bookworm
image: rust:1.94-bookworm
steps:
- name: Checkout
run: |

2
Cargo.lock generated
View File

@@ -650,6 +650,7 @@ dependencies = [
"dioxus-logger 0.6.2",
"dotenvy",
"gloo-timers",
"js-sys",
"mongodb",
"rand 0.9.2",
"reqwest",
@@ -665,6 +666,7 @@ dependencies = [
"tracing",
"url",
"uuid",
"wasm-bindgen",
"web-sys",
]

View File

@@ -43,4 +43,35 @@ impl ComplianceAgent {
);
orchestrator.run(repo_id, trigger).await
}
/// Run a PR review: scan the diff and post review comments.
pub async fn run_pr_review(
&self,
repo_id: &str,
pr_number: u64,
base_sha: &str,
head_sha: &str,
) -> Result<(), crate::error::AgentError> {
let repo = self
.db
.repositories()
.find_one(mongodb::bson::doc! {
"_id": mongodb::bson::oid::ObjectId::parse_str(repo_id)
.map_err(|e| crate::error::AgentError::Other(e.to_string()))?
})
.await?
.ok_or_else(|| {
crate::error::AgentError::Other(format!("Repository {repo_id} not found"))
})?;
let orchestrator = PipelineOrchestrator::new(
self.config.clone(),
self.db.clone(),
self.llm.clone(),
self.http.clone(),
);
orchestrator
.run_pr_review(&repo, repo_id, pr_number, base_sha, head_sha)
.await
}
}

View File

@@ -87,6 +87,20 @@ pub struct AddRepositoryRequest {
pub tracker_type: Option<TrackerType>,
pub tracker_owner: Option<String>,
pub tracker_repo: Option<String>,
pub tracker_token: Option<String>,
pub scan_schedule: Option<String>,
}
/// Partial-update payload for `PATCH /api/v1/repositories/{id}`.
///
/// Every field is optional; only fields present in the JSON body are
/// written to the repository document (see `update_repository`).
#[derive(Deserialize)]
pub struct UpdateRepositoryRequest {
    pub name: Option<String>,
    pub default_branch: Option<String>,
    // Git credentials used when cloning/fetching the repository.
    pub auth_token: Option<String>,
    pub auth_username: Option<String>,
    // Issue-tracker wiring: backend type, owner/repo on that backend,
    // and an optional per-repo API token.
    pub tracker_type: Option<TrackerType>,
    pub tracker_owner: Option<String>,
    pub tracker_repo: Option<String>,
    pub tracker_token: Option<String>,
    // Scan schedule string — presumably cron-like; TODO confirm the
    // expected format against the scheduler that consumes it.
    pub scan_schedule: Option<String>,
}
@@ -318,6 +332,7 @@ pub async fn add_repository(
repo.tracker_type = req.tracker_type;
repo.tracker_owner = req.tracker_owner;
repo.tracker_repo = req.tracker_repo;
repo.tracker_token = req.tracker_token;
repo.scan_schedule = req.scan_schedule;
agent
@@ -339,6 +354,61 @@ pub async fn add_repository(
}))
}
/// Partially update a tracked repository.
///
/// Only fields present in the request are modified; `updated_at` is always
/// refreshed. Returns 400 for a malformed id, 404 when no repository
/// matches, 500 on database errors.
#[tracing::instrument(skip_all, fields(repo_id = %id))]
pub async fn update_repository(
    Extension(agent): AgentExt,
    Path(id): Path<String>,
    Json(req): Json<UpdateRepositoryRequest>,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;

    // Accumulate a $set document holding only the supplied fields.
    let mut updates = doc! { "updated_at": mongodb::bson::DateTime::now() };
    if let Some(value) = req.name.as_deref() {
        updates.insert("name", value);
    }
    if let Some(value) = req.default_branch.as_deref() {
        updates.insert("default_branch", value);
    }
    if let Some(value) = req.auth_token.as_deref() {
        updates.insert("auth_token", value);
    }
    if let Some(value) = req.auth_username.as_deref() {
        updates.insert("auth_username", value);
    }
    if let Some(value) = req.tracker_type.as_ref() {
        // Stored as its string form, matching how add_repository persists it.
        updates.insert("tracker_type", value.to_string());
    }
    if let Some(value) = req.tracker_owner.as_deref() {
        updates.insert("tracker_owner", value);
    }
    if let Some(value) = req.tracker_repo.as_deref() {
        updates.insert("tracker_repo", value);
    }
    if let Some(value) = req.tracker_token.as_deref() {
        updates.insert("tracker_token", value);
    }
    if let Some(value) = req.scan_schedule.as_deref() {
        updates.insert("scan_schedule", value);
    }

    let result = agent
        .db
        .repositories()
        .update_one(doc! { "_id": oid }, doc! { "$set": updates })
        .await
        .map_err(|e| {
            tracing::warn!("Failed to update repository: {e}");
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    if result.matched_count == 0 {
        return Err(StatusCode::NOT_FOUND);
    }
    Ok(Json(serde_json::json!({ "status": "updated" })))
}
#[tracing::instrument(skip_all)]
pub async fn get_ssh_public_key(
Extension(agent): AgentExt,
@@ -363,6 +433,32 @@ pub async fn trigger_scan(
Ok(Json(serde_json::json!({ "status": "scan_triggered" })))
}
/// Return the webhook secret for a repository (used by dashboard to display it)
pub async fn get_webhook_config(
    Extension(agent): AgentExt,
    Path(id): Path<String>,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;

    // Load the repository; distinguish DB failure (500) from missing (404).
    let repo = agent
        .db
        .repositories()
        .find_one(doc! { "_id": oid })
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
        .ok_or(StatusCode::NOT_FOUND)?;

    // Default to "gitea" when no tracker is configured so the dashboard can
    // still render setup instructions.
    let tracker_type = match repo.tracker_type.as_ref() {
        Some(t) => t.to_string(),
        None => "gitea".to_string(),
    };

    Ok(Json(serde_json::json!({
        "webhook_secret": repo.webhook_secret,
        "tracker_type": tracker_type,
    })))
}
#[tracing::instrument(skip_all, fields(repo_id = %id))]
pub async fn delete_repository(
Extension(agent): AgentExt,

View File

@@ -2,6 +2,7 @@ use axum::routing::{delete, get, patch, post};
use axum::Router;
use crate::api::handlers;
use crate::webhooks;
pub fn build_router() -> Router {
Router::new()
@@ -19,7 +20,11 @@ pub fn build_router() -> Router {
)
.route(
"/api/v1/repositories/{id}",
delete(handlers::delete_repository),
delete(handlers::delete_repository).patch(handlers::update_repository),
)
.route(
"/api/v1/repositories/{id}/webhook-config",
get(handlers::get_webhook_config),
)
.route("/api/v1/findings", get(handlers::list_findings))
.route("/api/v1/findings/{id}", get(handlers::get_finding))
@@ -94,4 +99,17 @@ pub fn build_router() -> Router {
"/api/v1/chat/{repo_id}/status",
get(handlers::chat::embedding_status),
)
// Webhook endpoints (proxied through dashboard)
.route(
"/webhook/github/{repo_id}",
post(webhooks::github::handle_github_webhook),
)
.route(
"/webhook/gitlab/{repo_id}",
post(webhooks::gitlab::handle_gitlab_webhook),
)
.route(
"/webhook/gitea/{repo_id}",
post(webhooks::gitea::handle_gitea_webhook),
)
}

View File

@@ -132,6 +132,18 @@ impl GitOps {
Ok(())
}
/// Build credentials from agent config + per-repo overrides
pub fn make_repo_credentials(
    config: &compliance_core::AgentConfig,
    repo: &compliance_core::models::TrackedRepository,
) -> RepoCredentials {
    // The agent-wide SSH key is always offered; token/username are
    // per-repository settings and stay `None` when unset.
    let ssh_key_path = Some(config.ssh_key_path.clone());
    let auth_token = repo.auth_token.clone();
    let auth_username = repo.auth_username.clone();
    RepoCredentials {
        ssh_key_path,
        auth_token,
        auth_username,
    }
}
pub fn get_head_sha(repo_path: &Path) -> Result<String, AgentError> {
let repo = Repository::open(repo_path)?;
let head = repo.head()?;

View File

@@ -4,6 +4,7 @@ use mongodb::bson::doc;
use tracing::Instrument;
use compliance_core::models::*;
use compliance_core::traits::issue_tracker::IssueTracker;
use compliance_core::traits::Scanner;
use compliance_core::AgentConfig;
@@ -12,12 +13,90 @@ use crate::error::AgentError;
use crate::llm::LlmClient;
use crate::pipeline::code_review::CodeReviewScanner;
use crate::pipeline::cve::CveScanner;
use crate::pipeline::git::{GitOps, RepoCredentials};
use crate::pipeline::git::GitOps;
use crate::pipeline::gitleaks::GitleaksScanner;
use crate::pipeline::lint::LintScanner;
use crate::pipeline::patterns::{GdprPatternScanner, OAuthPatternScanner};
use crate::pipeline::sbom::SbomScanner;
use crate::pipeline::semgrep::SemgrepScanner;
use crate::trackers;
/// Enum dispatch for issue trackers (async traits aren't dyn-compatible).
///
/// One variant per supported backend; construction is centralized in
/// `build_tracker`, which picks the variant from the repo's tracker config.
enum TrackerDispatch {
    GitHub(trackers::github::GitHubTracker),
    GitLab(trackers::gitlab::GitLabTracker),
    Gitea(trackers::gitea::GiteaTracker),
    Jira(trackers::jira::JiraTracker),
}
impl TrackerDispatch {
    /// Backend name of the wrapped tracker (used in log messages).
    fn name(&self) -> &str {
        match self {
            Self::GitHub(t) => t.name(),
            Self::GitLab(t) => t.name(),
            Self::Gitea(t) => t.name(),
            Self::Jira(t) => t.name(),
        }
    }

    /// Forward `IssueTracker::create_issue` to the concrete backend.
    async fn create_issue(
        &self,
        owner: &str,
        repo: &str,
        title: &str,
        body: &str,
        labels: &[String],
    ) -> Result<TrackerIssue, compliance_core::error::CoreError> {
        match self {
            Self::GitHub(t) => t.create_issue(owner, repo, title, body, labels).await,
            Self::GitLab(t) => t.create_issue(owner, repo, title, body, labels).await,
            Self::Gitea(t) => t.create_issue(owner, repo, title, body, labels).await,
            Self::Jira(t) => t.create_issue(owner, repo, title, body, labels).await,
        }
    }

    /// Forward `IssueTracker::find_existing_issue` (fingerprint dedupe search)
    /// to the concrete backend.
    async fn find_existing_issue(
        &self,
        owner: &str,
        repo: &str,
        fingerprint: &str,
    ) -> Result<Option<TrackerIssue>, compliance_core::error::CoreError> {
        match self {
            Self::GitHub(t) => t.find_existing_issue(owner, repo, fingerprint).await,
            Self::GitLab(t) => t.find_existing_issue(owner, repo, fingerprint).await,
            Self::Gitea(t) => t.find_existing_issue(owner, repo, fingerprint).await,
            Self::Jira(t) => t.find_existing_issue(owner, repo, fingerprint).await,
        }
    }

    /// Forward `IssueTracker::create_pr_review` (summary + inline comments)
    /// to the concrete backend.
    async fn create_pr_review(
        &self,
        owner: &str,
        repo: &str,
        pr_number: u64,
        body: &str,
        comments: Vec<compliance_core::traits::issue_tracker::ReviewComment>,
    ) -> Result<(), compliance_core::error::CoreError> {
        match self {
            Self::GitHub(t) => {
                t.create_pr_review(owner, repo, pr_number, body, comments)
                    .await
            }
            Self::GitLab(t) => {
                t.create_pr_review(owner, repo, pr_number, body, comments)
                    .await
            }
            Self::Gitea(t) => {
                t.create_pr_review(owner, repo, pr_number, body, comments)
                    .await
            }
            Self::Jira(t) => {
                t.create_pr_review(owner, repo, pr_number, body, comments)
                    .await
            }
        }
    }
}
/// Context from graph analysis passed to LLM triage for enhanced filtering
#[derive(Debug)]
@@ -121,11 +200,7 @@ impl PipelineOrchestrator {
// Stage 0: Change detection
tracing::info!("[{repo_id}] Stage 0: Change detection");
let creds = RepoCredentials {
ssh_key_path: Some(self.config.ssh_key_path.clone()),
auth_token: repo.auth_token.clone(),
auth_username: repo.auth_username.clone(),
};
let creds = GitOps::make_repo_credentials(&self.config, repo);
let git_ops = GitOps::new(&self.config.git_clone_base_path, creds);
let repo_path = git_ops.clone_or_fetch(&repo.git_url, &repo.name)?;
@@ -293,6 +368,7 @@ impl PipelineOrchestrator {
// Dedup against existing findings and insert new ones
let mut new_count = 0u32;
let mut new_findings: Vec<Finding> = Vec::new();
for mut finding in all_findings {
finding.scan_run_id = Some(scan_run_id.to_string());
// Check if fingerprint already exists
@@ -302,7 +378,9 @@ impl PipelineOrchestrator {
.find_one(doc! { "fingerprint": &finding.fingerprint })
.await?;
if existing.is_none() {
self.db.findings().insert_one(&finding).await?;
let result = self.db.findings().insert_one(&finding).await?;
finding.id = result.inserted_id.as_object_id();
new_findings.push(finding);
new_count += 1;
}
}
@@ -351,7 +429,12 @@ impl PipelineOrchestrator {
// Stage 6: Issue Creation
tracing::info!("[{repo_id}] Stage 6: Issue Creation");
self.update_phase(scan_run_id, "issue_creation").await;
// Issue creation is handled by the trackers module - deferred to agent
if let Err(e) = self
.create_tracker_issues(repo, &repo_id, &new_findings)
.await
{
tracing::warn!("[{repo_id}] Issue creation failed: {e}");
}
// Stage 7: Update repository
self.db
@@ -477,6 +560,339 @@ impl PipelineOrchestrator {
}
}
/// Build an issue tracker client from a repository's tracker configuration.
/// Returns `None` if the repo has no tracker configured.
fn build_tracker(&self, repo: &TrackedRepository) -> Option<TrackerDispatch> {
    use secrecy::ExposeSecret;

    // Per-repo token takes precedence, fall back to global config
    match repo.tracker_type.as_ref()? {
        TrackerType::GitHub => {
            let token = repo.tracker_token.clone().or_else(|| {
                self.config
                    .github_token
                    .as_ref()
                    .map(|t| t.expose_secret().to_string())
            })?;
            match trackers::github::GitHubTracker::new(&secrecy::SecretString::from(token)) {
                Ok(t) => Some(TrackerDispatch::GitHub(t)),
                Err(e) => {
                    tracing::warn!("Failed to build GitHub tracker: {e}");
                    None
                }
            }
        }
        TrackerType::GitLab => {
            // Self-hosted URL from config, or the public instance.
            let base_url = self
                .config
                .gitlab_url
                .clone()
                .unwrap_or_else(|| "https://gitlab.com".to_string());
            let token = repo.tracker_token.clone().or_else(|| {
                self.config
                    .gitlab_token
                    .as_ref()
                    .map(|t| t.expose_secret().to_string())
            })?;
            Some(TrackerDispatch::GitLab(
                trackers::gitlab::GitLabTracker::new(
                    base_url,
                    secrecy::SecretString::from(token),
                ),
            ))
        }
        TrackerType::Gitea => {
            // Gitea requires a per-repo token and derives its API host
            // from the repository's git URL.
            let token = repo.tracker_token.clone()?;
            let base_url = extract_base_url(&repo.git_url)?;
            Some(TrackerDispatch::Gitea(trackers::gitea::GiteaTracker::new(
                base_url,
                secrecy::SecretString::from(token),
            )))
        }
        TrackerType::Jira => {
            // Jira needs the full connection triple from global config.
            let base_url = self.config.jira_url.clone()?;
            let email = self.config.jira_email.clone()?;
            let project_key = self.config.jira_project_key.clone()?;
            let token = repo.tracker_token.clone().or_else(|| {
                self.config
                    .jira_api_token
                    .as_ref()
                    .map(|t| t.expose_secret().to_string())
            })?;
            Some(TrackerDispatch::Jira(trackers::jira::JiraTracker::new(
                base_url,
                email,
                secrecy::SecretString::from(token),
                project_key,
            )))
        }
    }
}
/// Create tracker issues for new findings (severity >= Medium).
/// Checks for duplicates via fingerprint search before creating.
///
/// Best-effort per finding: individual search/create failures are logged
/// and skipped so one bad issue never aborts the whole batch.
#[tracing::instrument(skip_all, fields(repo_id = %repo_id))]
async fn create_tracker_issues(
    &self,
    repo: &TrackedRepository,
    repo_id: &str,
    new_findings: &[Finding],
) -> Result<(), AgentError> {
    // Missing tracker config is not an error — the feature is opt-in.
    let tracker = match self.build_tracker(repo) {
        Some(t) => t,
        None => {
            tracing::info!("[{repo_id}] No issue tracker configured, skipping");
            return Ok(());
        }
    };
    let owner = match repo.tracker_owner.as_deref() {
        Some(o) => o,
        None => {
            tracing::warn!("[{repo_id}] tracker_owner not set, skipping issue creation");
            return Ok(());
        }
    };
    let tracker_repo_name = match repo.tracker_repo.as_deref() {
        Some(r) => r,
        None => {
            tracing::warn!("[{repo_id}] tracker_repo not set, skipping issue creation");
            return Ok(());
        }
    };
    // Only create issues for medium+ severity findings
    let actionable: Vec<&Finding> = new_findings
        .iter()
        .filter(|f| {
            matches!(
                f.severity,
                Severity::Medium | Severity::High | Severity::Critical
            )
        })
        .collect();
    if actionable.is_empty() {
        tracing::info!("[{repo_id}] No medium+ findings, skipping issue creation");
        return Ok(());
    }
    tracing::info!(
        "[{repo_id}] Creating issues for {} findings via {}",
        actionable.len(),
        tracker.name()
    );
    let mut created = 0u32;
    for finding in actionable {
        // Check if an issue already exists for this fingerprint
        match tracker
            .find_existing_issue(owner, tracker_repo_name, &finding.fingerprint)
            .await
        {
            Ok(Some(existing)) => {
                // Duplicate — leave the existing issue alone.
                tracing::debug!(
                    "[{repo_id}] Issue already exists for {}: {}",
                    finding.fingerprint,
                    existing.external_url
                );
                continue;
            }
            Ok(None) => {}
            Err(e) => {
                tracing::warn!("[{repo_id}] Failed to search for existing issue: {e}");
                // Continue and try to create anyway
            }
        }
        let title = format!(
            "[{}] {}: {}",
            finding.severity, finding.scanner, finding.title
        );
        let body = format_issue_body(finding);
        let labels = vec![
            format!("severity:{}", finding.severity),
            format!("scanner:{}", finding.scanner),
            "compliance-scanner".to_string(),
        ];
        match tracker
            .create_issue(owner, tracker_repo_name, &title, &body, &labels)
            .await
        {
            Ok(mut issue) => {
                // Link the tracker issue back to the Mongo finding id.
                issue.finding_id = finding
                    .id
                    .as_ref()
                    .map(|id| id.to_hex())
                    .unwrap_or_default();
                // Update the finding with the issue URL
                if let Some(finding_id) = &finding.id {
                    // Best-effort: a failed URL back-link is silently dropped.
                    let _ = self
                        .db
                        .findings()
                        .update_one(
                            doc! { "_id": finding_id },
                            doc! { "$set": { "tracker_issue_url": &issue.external_url } },
                        )
                        .await;
                }
                // Store the tracker issue record
                if let Err(e) = self.db.tracker_issues().insert_one(&issue).await {
                    tracing::warn!("[{repo_id}] Failed to store tracker issue: {e}");
                }
                created += 1;
            }
            Err(e) => {
                tracing::warn!(
                    "[{repo_id}] Failed to create issue for {}: {e}",
                    finding.fingerprint
                );
            }
        }
    }
    tracing::info!("[{repo_id}] Created {created} tracker issues");
    Ok(())
}
/// Run an incremental scan on a PR diff and post review comments.
///
/// Scans only the files changed between `base_sha` and `head_sha`
/// (semgrep findings are filtered to changed paths; the LLM reviewer
/// sees the diff directly). Findings are posted as a PR review on the
/// configured tracker — they are not persisted to the findings
/// collection here. Missing tracker configuration is logged and
/// treated as success.
#[tracing::instrument(skip_all, fields(repo_id = %repo_id, pr_number))]
pub async fn run_pr_review(
    &self,
    repo: &TrackedRepository,
    repo_id: &str,
    pr_number: u64,
    base_sha: &str,
    head_sha: &str,
) -> Result<(), AgentError> {
    // A review can only be posted through a configured tracker.
    let tracker = match self.build_tracker(repo) {
        Some(t) => t,
        None => {
            tracing::warn!("[{repo_id}] No tracker configured, cannot post PR review");
            return Ok(());
        }
    };
    let owner = repo.tracker_owner.as_deref().unwrap_or("");
    let tracker_repo_name = repo.tracker_repo.as_deref().unwrap_or("");
    if owner.is_empty() || tracker_repo_name.is_empty() {
        tracing::warn!("[{repo_id}] tracker_owner or tracker_repo not set");
        return Ok(());
    }
    // Clone/fetch the repo
    let creds = GitOps::make_repo_credentials(&self.config, repo);
    let git_ops = GitOps::new(&self.config.git_clone_base_path, creds);
    let repo_path = git_ops.clone_or_fetch(&repo.git_url, &repo.name)?;
    // Get diff between base and head
    let diff_files = GitOps::get_diff_content(&repo_path, base_sha, head_sha)?;
    if diff_files.is_empty() {
        tracing::info!("[{repo_id}] PR #{pr_number}: no diff files, skipping review");
        return Ok(());
    }
    // Run semgrep on the full repo but we'll filter findings to changed files
    let changed_paths: std::collections::HashSet<String> =
        diff_files.iter().map(|f| f.path.clone()).collect();
    let mut pr_findings: Vec<Finding> = Vec::new();
    // SAST scan (semgrep)
    match SemgrepScanner.scan(&repo_path, repo_id).await {
        Ok(output) => {
            for f in output.findings {
                // Keep only findings located in files this PR touched.
                if let Some(fp) = &f.file_path {
                    if changed_paths.contains(fp.as_str()) {
                        pr_findings.push(f);
                    }
                }
            }
        }
        // Scanner failure is non-fatal: the LLM review below still runs.
        Err(e) => tracing::warn!("[{repo_id}] PR semgrep failed: {e}"),
    }
    // LLM code review on the diff
    let reviewer = CodeReviewScanner::new(self.llm.clone());
    let review_output = reviewer
        .review_diff(&repo_path, repo_id, base_sha, head_sha)
        .await;
    pr_findings.extend(review_output.findings);
    if pr_findings.is_empty() {
        // Post a clean review
        if let Err(e) = tracker
            .create_pr_review(
                owner,
                tracker_repo_name,
                pr_number,
                "Compliance scan: no issues found in this PR.",
                Vec::new(),
            )
            .await
        {
            tracing::warn!("[{repo_id}] Failed to post clean PR review: {e}");
        }
        return Ok(());
    }
    // Build review comments from findings
    // (findings without a file+line location only appear in the summary).
    let mut review_comments = Vec::new();
    for finding in &pr_findings {
        if let (Some(path), Some(line)) = (&finding.file_path, finding.line_number) {
            let comment_body = format!(
                "**[{}] {}**\n\n{}\n\n*Scanner: {} | {}*",
                finding.severity,
                finding.title,
                finding.description,
                finding.scanner,
                finding
                    .cwe
                    .as_deref()
                    .map(|c| format!("CWE: {c}"))
                    .unwrap_or_default(),
            );
            review_comments.push(compliance_core::traits::issue_tracker::ReviewComment {
                path: path.clone(),
                line,
                body: comment_body,
            });
        }
    }
    // One-line-per-finding overview used as the review body.
    let summary = format!(
        "Compliance scan found **{}** issue(s) in this PR:\n\n{}",
        pr_findings.len(),
        pr_findings
            .iter()
            .map(|f| format!("- **[{}]** {}: {}", f.severity, f.scanner, f.title))
            .collect::<Vec<_>>()
            .join("\n"),
    );
    // Posting is best-effort — failures are logged, not returned.
    if let Err(e) = tracker
        .create_pr_review(
            owner,
            tracker_repo_name,
            pr_number,
            &summary,
            review_comments,
        )
        .await
    {
        tracing::warn!("[{repo_id}] Failed to post PR review: {e}");
    } else {
        tracing::info!(
            "[{repo_id}] Posted PR review on #{pr_number} with {} findings",
            pr_findings.len()
        );
    }
    Ok(())
}
async fn update_phase(&self, scan_run_id: &str, phase: &str) {
if let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(scan_run_id) {
let _ = self
@@ -493,3 +909,73 @@ impl PipelineOrchestrator {
}
}
}
/// Extract the scheme + host from a git URL.
/// e.g. "https://gitea.example.com/owner/repo.git" → "https://gitea.example.com"
/// e.g. "ssh://git@gitea.example.com:22/owner/repo.git" → "https://gitea.example.com"
fn extract_base_url(git_url: &str) -> Option<String> {
    // Plain HTTP(S): keep the original scheme, drop everything after the
    // first '/' following the host.
    for scheme in ["https://", "http://"] {
        if let Some(remainder) = git_url.strip_prefix(scheme) {
            let host = remainder.split('/').next()?;
            return Some(format!("{scheme}{host}"));
        }
    }
    if let Some(remainder) = git_url.strip_prefix("ssh://") {
        // ssh://git@host:port/path → host (userinfo/port stripped),
        // assumed to serve its API over https.
        let after_at = match remainder.split_once('@') {
            Some((_, tail)) => tail,
            None => remainder,
        };
        let host = after_at.split(&[':', '/'][..]).next()?;
        return Some(format!("https://{host}"));
    }
    // SCP-style: git@host:owner/repo.git — anything without an '@' is
    // unrecognized and yields None.
    let (_, after_at) = git_url.split_once('@')?;
    let host = after_at.split(':').next()?;
    Some(format!("https://{host}"))
}
/// Format a finding into a markdown issue body for the tracker.
///
/// Sections with no data (rule, CWE, location, code, remediation, fix)
/// are omitted; the fingerprint footer is always present.
fn format_issue_body(finding: &Finding) -> String {
    // Header + always-present metadata in one shot.
    let mut out = format!(
        "## {} Finding\n\n**Scanner:** {}\n**Severity:** {}\n",
        finding.severity, finding.scanner, finding.severity
    );
    if let Some(rule_id) = &finding.rule_id {
        out.push_str(&format!("**Rule:** {rule_id}\n"));
    }
    if let Some(cwe_id) = &finding.cwe {
        out.push_str(&format!("**CWE:** {cwe_id}\n"));
    }
    out.push_str(&format!("\n### Description\n\n{}\n", finding.description));
    if let Some(path) = &finding.file_path {
        out.push_str(&format!("\n### Location\n\n**File:** `{path}`"));
        if let Some(line) = finding.line_number {
            out.push_str(&format!(" (line {line})"));
        }
        out.push('\n');
    }
    if let Some(snippet) = &finding.code_snippet {
        out.push_str(&format!("\n### Code\n\n```\n{snippet}\n```\n"));
    }
    if let Some(remediation) = &finding.remediation {
        out.push_str(&format!("\n### Remediation\n\n{remediation}\n"));
    }
    if let Some(fix) = &finding.suggested_fix {
        out.push_str(&format!("\n### Suggested Fix\n\n```\n{fix}\n```\n"));
    }
    out.push_str(&format!(
        "\n---\n*Fingerprint:* `{}`\n*Generated by compliance-scanner*",
        finding.fingerprint
    ));
    out
}

View File

@@ -0,0 +1,213 @@
use compliance_core::error::CoreError;
use compliance_core::models::{TrackerIssue, TrackerType};
use compliance_core::traits::issue_tracker::{IssueTracker, ReviewComment};
use secrecy::{ExposeSecret, SecretString};
/// Minimal Gitea REST API client implementing the `IssueTracker` trait.
pub struct GiteaTracker {
    // Instance root, e.g. "https://gitea.example.com" (trailing '/' stripped
    // by the constructor).
    base_url: String,
    http: reqwest::Client,
    // Personal access token, sent as `Authorization: token …` on every call.
    token: SecretString,
}
impl GiteaTracker {
    /// Create a client for the Gitea instance at `base_url`.
    /// Trailing slashes are stripped so `api_url` can join paths safely.
    pub fn new(base_url: String, token: SecretString) -> Self {
        let base_url = base_url.trim_end_matches('/').to_owned();
        Self {
            base_url,
            http: reqwest::Client::new(),
            token,
        }
    }

    /// Join an API path (starting with '/') onto `{base_url}/api/v1`.
    fn api_url(&self, path: &str) -> String {
        let mut url = self.base_url.clone();
        url.push_str("/api/v1");
        url.push_str(path);
        url
    }
}
impl IssueTracker for GiteaTracker {
fn name(&self) -> &str {
"gitea"
}
async fn create_issue(
&self,
owner: &str,
repo: &str,
title: &str,
body: &str,
labels: &[String],
) -> Result<TrackerIssue, CoreError> {
let url = self.api_url(&format!("/repos/{owner}/{repo}/issues"));
// Gitea expects label IDs (integers), not names. Append label names
// to the body instead since resolving IDs would require extra API calls.
let mut full_body = body.to_string();
if !labels.is_empty() {
full_body.push_str("\n\n**Labels:** ");
full_body.push_str(&labels.join(", "));
}
let payload = serde_json::json!({
"title": title,
"body": full_body,
});
let resp = self
.http
.post(&url)
.header(
"Authorization",
format!("token {}", self.token.expose_secret()),
)
.json(&payload)
.send()
.await
.map_err(|e| CoreError::IssueTracker(format!("Gitea create issue failed: {e}")))?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(CoreError::IssueTracker(format!(
"Gitea returned {status}: {text}"
)));
}
let issue: serde_json::Value = resp
.json()
.await
.map_err(|e| CoreError::IssueTracker(format!("Failed to parse Gitea response: {e}")))?;
Ok(TrackerIssue::new(
String::new(),
TrackerType::Gitea,
issue["number"].to_string(),
issue["html_url"].as_str().unwrap_or("").to_string(),
title.to_string(),
))
}
async fn update_issue_status(
&self,
owner: &str,
repo: &str,
external_id: &str,
status: &str,
) -> Result<(), CoreError> {
let url = self.api_url(&format!("/repos/{owner}/{repo}/issues/{external_id}"));
let state = match status {
"closed" | "resolved" => "closed",
_ => "open",
};
self.http
.patch(&url)
.header(
"Authorization",
format!("token {}", self.token.expose_secret()),
)
.json(&serde_json::json!({ "state": state }))
.send()
.await
.map_err(|e| CoreError::IssueTracker(format!("Gitea update issue failed: {e}")))?;
Ok(())
}
async fn add_comment(
&self,
owner: &str,
repo: &str,
external_id: &str,
body: &str,
) -> Result<(), CoreError> {
let url = self.api_url(&format!(
"/repos/{owner}/{repo}/issues/{external_id}/comments"
));
self.http
.post(&url)
.header(
"Authorization",
format!("token {}", self.token.expose_secret()),
)
.json(&serde_json::json!({ "body": body }))
.send()
.await
.map_err(|e| CoreError::IssueTracker(format!("Gitea add comment failed: {e}")))?;
Ok(())
}
async fn create_pr_review(
&self,
owner: &str,
repo: &str,
pr_number: u64,
body: &str,
comments: Vec<ReviewComment>,
) -> Result<(), CoreError> {
let url = self.api_url(&format!("/repos/{owner}/{repo}/pulls/{pr_number}/reviews"));
let review_comments: Vec<serde_json::Value> = comments
.iter()
.map(|c| {
serde_json::json!({
"path": c.path,
"new_position": c.line,
"body": c.body,
})
})
.collect();
self.http
.post(&url)
.header(
"Authorization",
format!("token {}", self.token.expose_secret()),
)
.json(&serde_json::json!({
"body": body,
"event": "COMMENT",
"comments": review_comments,
}))
.send()
.await
.map_err(|e| CoreError::IssueTracker(format!("Gitea PR review failed: {e}")))?;
Ok(())
}
async fn find_existing_issue(
&self,
owner: &str,
repo: &str,
fingerprint: &str,
) -> Result<Option<TrackerIssue>, CoreError> {
let url = self.api_url(&format!(
"/repos/{owner}/{repo}/issues?type=issues&state=open&q={fingerprint}"
));
let resp = self
.http
.get(&url)
.header(
"Authorization",
format!("token {}", self.token.expose_secret()),
)
.send()
.await
.map_err(|e| CoreError::IssueTracker(format!("Gitea search failed: {e}")))?;
let issues: Vec<serde_json::Value> = resp.json().await.unwrap_or_default();
if let Some(issue) = issues.first() {
Ok(Some(TrackerIssue::new(
String::new(),
TrackerType::Gitea,
issue["number"].to_string(),
issue["html_url"].as_str().unwrap_or("").to_string(),
issue["title"].as_str().unwrap_or("").to_string(),
)))
} else {
Ok(None)
}
}
}

View File

@@ -1,3 +1,4 @@
pub mod gitea;
pub mod github;
pub mod gitlab;
pub mod jira;

View File

@@ -0,0 +1,138 @@
use std::sync::Arc;
use axum::body::Bytes;
use axum::extract::{Extension, Path};
use axum::http::{HeaderMap, StatusCode};
use hmac::{Hmac, Mac};
use sha2::Sha256;
use compliance_core::models::ScanTrigger;
use crate::agent::ComplianceAgent;
type HmacSha256 = Hmac<Sha256>;
/// Axum handler for Gitea webhooks on `/webhook/gitea/{repo_id}`.
///
/// Verifies the per-repo HMAC secret (when configured), then dispatches
/// `push` events to a background scan and `pull_request` events to the
/// PR review flow. All heavy work is spawned so the webhook replies fast.
pub async fn handle_gitea_webhook(
    Extension(agent): Extension<Arc<ComplianceAgent>>,
    Path(repo_id): Path<String>,
    headers: HeaderMap,
    body: Bytes,
) -> StatusCode {
    // The path segment must be a valid ObjectId of a tracked repo.
    let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(&repo_id) else {
        return StatusCode::NOT_FOUND;
    };
    let found = agent
        .db
        .repositories()
        .find_one(mongodb::bson::doc! { "_id": oid })
        .await
        .ok()
        .flatten();
    let Some(repo) = found else {
        tracing::warn!("Gitea webhook: repo {repo_id} not found");
        return StatusCode::NOT_FOUND;
    };

    // Verify HMAC-SHA256 signature using the per-repo secret; verification
    // is skipped when the repo has no secret configured.
    if let Some(secret) = &repo.webhook_secret {
        let signature = headers
            .get("x-gitea-signature")
            .and_then(|v| v.to_str().ok())
            .unwrap_or("");
        if !verify_signature(secret, &body, signature) {
            tracing::warn!("Gitea webhook: invalid signature for repo {repo_id}");
            return StatusCode::UNAUTHORIZED;
        }
    }

    let event = headers
        .get("x-gitea-event")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("");
    let payload: serde_json::Value = match serde_json::from_slice(&body) {
        Ok(v) => v,
        Err(e) => {
            tracing::warn!("Gitea webhook: invalid JSON: {e}");
            return StatusCode::BAD_REQUEST;
        }
    };

    match event {
        "push" => {
            // Kick off a full scan in the background and reply immediately.
            let agent_clone = (*agent).clone();
            let repo_id = repo_id.clone();
            tokio::spawn(async move {
                tracing::info!("Gitea push webhook: triggering scan for {repo_id}");
                if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
                    tracing::error!("Webhook-triggered scan failed: {e}");
                }
            });
            StatusCode::OK
        }
        "pull_request" => handle_pull_request(agent, &repo_id, &payload).await,
        _ => {
            tracing::debug!("Gitea webhook: ignoring event '{event}'");
            StatusCode::OK
        }
    }
}
/// Handle a Gitea `pull_request` event: spawn a background PR review for
/// newly-opened or updated PRs; other actions are acknowledged and ignored.
async fn handle_pull_request(
    agent: Arc<ComplianceAgent>,
    repo_id: &str,
    payload: &serde_json::Value,
) -> StatusCode {
    // Only review when the PR is opened or its head moves.
    let action = payload["action"].as_str().unwrap_or("");
    if !matches!(action, "opened" | "synchronized") {
        return StatusCode::OK;
    }

    let pr = &payload["pull_request"];
    let pr_number = pr["number"].as_u64().unwrap_or(0);
    let head_sha = pr["head"]["sha"].as_str().unwrap_or("").to_string();
    let base_sha = pr["base"]["sha"].as_str().unwrap_or("").to_string();
    if pr_number == 0 || head_sha.is_empty() || base_sha.is_empty() {
        tracing::warn!("Gitea PR webhook: missing required fields");
        return StatusCode::BAD_REQUEST;
    }

    // Move owned copies into the background task and reply immediately.
    let repo_id = repo_id.to_string();
    let agent_clone = (*agent).clone();
    tokio::spawn(async move {
        tracing::info!("Gitea PR webhook: reviewing PR #{pr_number} on {repo_id}");
        if let Err(e) = agent_clone
            .run_pr_review(&repo_id, pr_number, &base_sha, &head_sha)
            .await
        {
            tracing::error!("PR review failed for #{pr_number}: {e}");
        }
    });
    StatusCode::OK
}
/// Check a Gitea webhook signature: HMAC-SHA256 of the raw body, keyed by
/// the repo secret, sent as bare hex (no `sha256=` prefix). Any decode or
/// key-setup failure yields `false`.
fn verify_signature(secret: &str, body: &[u8], signature: &str) -> bool {
    // Gitea sends raw hex (no sha256= prefix)
    let Ok(expected) = hex::decode(signature) else {
        return false;
    };
    let Ok(mut mac) = HmacSha256::new_from_slice(secret.as_bytes()) else {
        return false;
    };
    mac.update(body);
    // Constant-time comparison via the hmac crate.
    mac.verify_slice(&expected).is_ok()
}

View File

@@ -1,10 +1,9 @@
use std::sync::Arc;
use axum::body::Bytes;
use axum::extract::Extension;
use axum::extract::{Extension, Path};
use axum::http::{HeaderMap, StatusCode};
use hmac::{Hmac, Mac};
use secrecy::ExposeSecret;
use sha2::Sha256;
use compliance_core::models::ScanTrigger;
@@ -15,18 +14,37 @@ type HmacSha256 = Hmac<Sha256>;
pub async fn handle_github_webhook(
Extension(agent): Extension<Arc<ComplianceAgent>>,
Path(repo_id): Path<String>,
headers: HeaderMap,
body: Bytes,
) -> StatusCode {
// Verify HMAC signature
if let Some(secret) = &agent.config.github_webhook_secret {
// Look up the repo to get its webhook secret
let oid = match mongodb::bson::oid::ObjectId::parse_str(&repo_id) {
Ok(oid) => oid,
Err(_) => return StatusCode::NOT_FOUND,
};
let repo = match agent
.db
.repositories()
.find_one(mongodb::bson::doc! { "_id": oid })
.await
{
Ok(Some(repo)) => repo,
_ => {
tracing::warn!("GitHub webhook: repo {repo_id} not found");
return StatusCode::NOT_FOUND;
}
};
// Verify HMAC-SHA256 signature using the per-repo secret
if let Some(secret) = &repo.webhook_secret {
let signature = headers
.get("x-hub-signature-256")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
if !verify_signature(secret.expose_secret(), &body, signature) {
tracing::warn!("GitHub webhook: invalid signature");
if !verify_signature(secret, &body, signature) {
tracing::warn!("GitHub webhook: invalid signature for repo {repo_id}");
return StatusCode::UNAUTHORIZED;
}
}
@@ -45,8 +63,18 @@ pub async fn handle_github_webhook(
};
match event {
"push" => handle_push(agent, &payload).await,
"pull_request" => handle_pull_request(agent, &payload).await,
"push" => {
let agent_clone = (*agent).clone();
let repo_id = repo_id.clone();
tokio::spawn(async move {
tracing::info!("GitHub push webhook: triggering scan for {repo_id}");
if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
tracing::error!("Webhook-triggered scan failed: {e}");
}
});
StatusCode::OK
}
"pull_request" => handle_pull_request(agent, &repo_id, &payload).await,
_ => {
tracing::debug!("GitHub webhook: ignoring event '{event}'");
StatusCode::OK
@@ -54,43 +82,9 @@ pub async fn handle_github_webhook(
}
}
/// Handle a GitHub `push` event: match the payload's repository URL against
/// the tracked repositories and, on a hit, kick off a webhook-triggered scan
/// in the background. Always answers quickly; the scan runs detached.
async fn handle_push(agent: Arc<ComplianceAgent>, payload: &serde_json::Value) -> StatusCode {
    // Prefer clone_url; fall back to html_url when absent.
    let repository = &payload["repository"];
    let repo_url = repository["clone_url"]
        .as_str()
        .or_else(|| repository["html_url"].as_str())
        .unwrap_or("");
    if repo_url.is_empty() {
        return StatusCode::BAD_REQUEST;
    }
    // Look up the tracked repository whose git URL matches the payload.
    let tracked = agent
        .db
        .repositories()
        .find_one(mongodb::bson::doc! { "git_url": repo_url })
        .await
        .ok()
        .flatten();
    match tracked {
        Some(tracked) => {
            let repo_id = tracked.id.map(|id| id.to_hex()).unwrap_or_default();
            let agent_clone = (*agent).clone();
            // Detach the scan so the webhook response isn't blocked on it.
            tokio::spawn(async move {
                tracing::info!("GitHub push webhook: triggering scan for {repo_id}");
                if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
                    tracing::error!("Webhook-triggered scan failed: {e}");
                }
            });
        }
        None => {
            tracing::debug!("GitHub push webhook: no tracked repo for {repo_url}");
        }
    }
    StatusCode::OK
}
async fn handle_pull_request(
_agent: Arc<ComplianceAgent>,
agent: Arc<ComplianceAgent>,
repo_id: &str,
payload: &serde_json::Value,
) -> StatusCode {
let action = payload["action"].as_str().unwrap_or("");
@@ -98,21 +92,37 @@ async fn handle_pull_request(
return StatusCode::OK;
}
let repo_url = payload["repository"]["clone_url"].as_str().unwrap_or("");
let pr_number = payload["pull_request"]["number"].as_u64().unwrap_or(0);
let head_sha = payload["pull_request"]["head"]["sha"]
.as_str()
.unwrap_or("");
let base_sha = payload["pull_request"]["base"]["sha"]
.as_str()
.unwrap_or("");
if repo_url.is_empty() || pr_number == 0 {
if pr_number == 0 || head_sha.is_empty() || base_sha.is_empty() {
return StatusCode::BAD_REQUEST;
}
tracing::info!("GitHub PR webhook: PR #{pr_number} {action} on {repo_url}");
// PR review scan would be triggered here - runs incremental SAST on diff
// and posts review comments via the GitHub tracker
let repo_id = repo_id.to_string();
let head_sha = head_sha.to_string();
let base_sha = base_sha.to_string();
let agent_clone = (*agent).clone();
tokio::spawn(async move {
tracing::info!("GitHub PR webhook: reviewing PR #{pr_number} on {repo_id}");
if let Err(e) = agent_clone
.run_pr_review(&repo_id, pr_number, &base_sha, &head_sha)
.await
{
tracing::error!("PR review failed for #{pr_number}: {e}");
}
});
StatusCode::OK
}
fn verify_signature(secret: &str, body: &[u8], signature: &str) -> bool {
// GitHub sends sha256=<hex>
let sig = signature.strip_prefix("sha256=").unwrap_or(signature);
let sig_bytes = match hex::decode(sig) {
Ok(b) => b,

View File

@@ -1,9 +1,8 @@
use std::sync::Arc;
use axum::body::Bytes;
use axum::extract::Extension;
use axum::extract::{Extension, Path};
use axum::http::{HeaderMap, StatusCode};
use secrecy::ExposeSecret;
use compliance_core::models::ScanTrigger;
@@ -11,18 +10,37 @@ use crate::agent::ComplianceAgent;
pub async fn handle_gitlab_webhook(
Extension(agent): Extension<Arc<ComplianceAgent>>,
Path(repo_id): Path<String>,
headers: HeaderMap,
body: Bytes,
) -> StatusCode {
// Verify GitLab token
if let Some(secret) = &agent.config.gitlab_webhook_secret {
// Look up the repo to get its webhook secret
let oid = match mongodb::bson::oid::ObjectId::parse_str(&repo_id) {
Ok(oid) => oid,
Err(_) => return StatusCode::NOT_FOUND,
};
let repo = match agent
.db
.repositories()
.find_one(mongodb::bson::doc! { "_id": oid })
.await
{
Ok(Some(repo)) => repo,
_ => {
tracing::warn!("GitLab webhook: repo {repo_id} not found");
return StatusCode::NOT_FOUND;
}
};
// GitLab sends the secret token in X-Gitlab-Token header (plain text comparison)
if let Some(secret) = &repo.webhook_secret {
let token = headers
.get("x-gitlab-token")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
if token != secret.expose_secret() {
tracing::warn!("GitLab webhook: invalid token");
if token != secret {
tracing::warn!("GitLab webhook: invalid token for repo {repo_id}");
return StatusCode::UNAUTHORIZED;
}
}
@@ -38,8 +56,18 @@ pub async fn handle_gitlab_webhook(
let event_type = payload["object_kind"].as_str().unwrap_or("");
match event_type {
"push" => handle_push(agent, &payload).await,
"merge_request" => handle_merge_request(agent, &payload).await,
"push" => {
let agent_clone = (*agent).clone();
let repo_id = repo_id.clone();
tokio::spawn(async move {
tracing::info!("GitLab push webhook: triggering scan for {repo_id}");
if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
tracing::error!("Webhook-triggered scan failed: {e}");
}
});
StatusCode::OK
}
"merge_request" => handle_merge_request(agent, &repo_id, &payload).await,
_ => {
tracing::debug!("GitLab webhook: ignoring event '{event_type}'");
StatusCode::OK
@@ -47,40 +75,9 @@ pub async fn handle_gitlab_webhook(
}
}
/// Handle a GitLab `push` event: resolve the project's URL from the payload,
/// find the matching tracked repository, and launch a webhook-triggered scan
/// in a background task. Unknown repositories are silently accepted.
async fn handle_push(agent: Arc<ComplianceAgent>, payload: &serde_json::Value) -> StatusCode {
    // GitLab carries the URL under project.git_http_url (web_url as fallback).
    let project = &payload["project"];
    let repo_url = project["git_http_url"]
        .as_str()
        .or_else(|| project["web_url"].as_str())
        .unwrap_or("");
    if repo_url.is_empty() {
        return StatusCode::BAD_REQUEST;
    }
    // Match the payload URL against the tracked repositories.
    let tracked = agent
        .db
        .repositories()
        .find_one(mongodb::bson::doc! { "git_url": repo_url })
        .await
        .ok()
        .flatten();
    if let Some(tracked) = tracked {
        let repo_id = tracked.id.map(|id| id.to_hex()).unwrap_or_default();
        let agent_clone = (*agent).clone();
        // Run the scan asynchronously so the webhook returns immediately.
        tokio::spawn(async move {
            tracing::info!("GitLab push webhook: triggering scan for {repo_id}");
            if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
                tracing::error!("Webhook-triggered scan failed: {e}");
            }
        });
    }
    StatusCode::OK
}
async fn handle_merge_request(
_agent: Arc<ComplianceAgent>,
agent: Arc<ComplianceAgent>,
repo_id: &str,
payload: &serde_json::Value,
) -> StatusCode {
let action = payload["object_attributes"]["action"]
@@ -91,7 +88,31 @@ async fn handle_merge_request(
}
let mr_iid = payload["object_attributes"]["iid"].as_u64().unwrap_or(0);
tracing::info!("GitLab MR webhook: MR !{mr_iid} {action}");
let head_sha = payload["object_attributes"]["last_commit"]["id"]
.as_str()
.unwrap_or("");
let base_sha = payload["object_attributes"]["diff_refs"]["base_sha"]
.as_str()
.unwrap_or("");
if mr_iid == 0 || head_sha.is_empty() || base_sha.is_empty() {
tracing::warn!("GitLab MR webhook: missing required fields");
return StatusCode::BAD_REQUEST;
}
let repo_id = repo_id.to_string();
let head_sha = head_sha.to_string();
let base_sha = base_sha.to_string();
let agent_clone = (*agent).clone();
tokio::spawn(async move {
tracing::info!("GitLab MR webhook: reviewing MR !{mr_iid} on {repo_id}");
if let Err(e) = agent_clone
.run_pr_review(&repo_id, mr_iid, &base_sha, &head_sha)
.await
{
tracing::error!("MR review failed for !{mr_iid}: {e}");
}
});
StatusCode::OK
}

View File

@@ -1,3 +1,4 @@
pub mod gitea;
pub mod github;
pub mod gitlab;
pub mod server;

View File

@@ -5,12 +5,23 @@ use axum::{Extension, Router};
use crate::agent::ComplianceAgent;
use crate::error::AgentError;
use crate::webhooks::{github, gitlab};
use crate::webhooks::{gitea, github, gitlab};
pub async fn start_webhook_server(agent: &ComplianceAgent) -> Result<(), AgentError> {
let app = Router::new()
.route("/webhook/github", post(github::handle_github_webhook))
.route("/webhook/gitlab", post(gitlab::handle_gitlab_webhook))
// Per-repo webhook URLs: /webhook/{platform}/{repo_id}
.route(
"/webhook/github/{repo_id}",
post(github::handle_github_webhook),
)
.route(
"/webhook/gitlab/{repo_id}",
post(gitlab::handle_gitlab_webhook),
)
.route(
"/webhook/gitea/{repo_id}",
post(gitea::handle_gitea_webhook),
)
.layer(Extension(Arc::new(agent.clone())));
let addr = "0.0.0.0:3002";

View File

@@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize};
pub enum TrackerType {
GitHub,
GitLab,
Gitea,
Jira,
}
@@ -14,6 +15,7 @@ impl std::fmt::Display for TrackerType {
match self {
Self::GitHub => write!(f, "github"),
Self::GitLab => write!(f, "gitlab"),
Self::Gitea => write!(f, "gitea"),
Self::Jira => write!(f, "jira"),
}
}

View File

@@ -25,9 +25,15 @@ pub struct TrackedRepository {
pub scan_schedule: Option<String>,
#[serde(default)]
pub webhook_enabled: bool,
/// Auto-generated HMAC secret for verifying incoming webhooks
#[serde(default, skip_serializing_if = "Option::is_none")]
pub webhook_secret: Option<String>,
pub tracker_type: Option<TrackerType>,
pub tracker_owner: Option<String>,
pub tracker_repo: Option<String>,
/// Optional per-repo PAT for the issue tracker (GitHub/GitLab/Jira)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tracker_token: Option<String>,
/// Optional auth token for HTTPS private repos (PAT or password)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub auth_token: Option<String>,
@@ -69,6 +75,8 @@ where
impl TrackedRepository {
pub fn new(name: String, git_url: String) -> Self {
let now = Utc::now();
// Generate a random webhook secret (hex-encoded UUID v4, no dashes)
let webhook_secret = uuid::Uuid::new_v4().to_string().replace('-', "");
Self {
id: None,
name,
@@ -79,9 +87,11 @@ impl TrackedRepository {
auth_token: None,
auth_username: None,
webhook_enabled: false,
webhook_secret: Some(webhook_secret),
tracker_type: None,
tracker_owner: None,
tracker_repo: None,
tracker_token: None,
last_scanned_commit: None,
findings_count: 0,
created_at: now,

View File

@@ -7,6 +7,7 @@ pub mod findings;
pub mod graph;
pub mod issues;
pub mod mcp;
#[allow(clippy::too_many_arguments)]
pub mod repositories;
pub mod sbom;
pub mod scans;

View File

@@ -36,6 +36,10 @@ pub async fn add_repository(
default_branch: String,
auth_token: Option<String>,
auth_username: Option<String>,
tracker_type: Option<String>,
tracker_owner: Option<String>,
tracker_repo: Option<String>,
tracker_token: Option<String>,
) -> Result<(), ServerFnError> {
let state: super::server_state::ServerState =
dioxus_fullstack::FullstackContext::extract().await?;
@@ -52,6 +56,18 @@ pub async fn add_repository(
if let Some(username) = auth_username.filter(|u| !u.is_empty()) {
body["auth_username"] = serde_json::Value::String(username);
}
if let Some(tt) = tracker_type.filter(|t| !t.is_empty()) {
body["tracker_type"] = serde_json::Value::String(tt);
}
if let Some(to) = tracker_owner.filter(|t| !t.is_empty()) {
body["tracker_owner"] = serde_json::Value::String(to);
}
if let Some(tr) = tracker_repo.filter(|t| !t.is_empty()) {
body["tracker_repo"] = serde_json::Value::String(tr);
}
if let Some(tk) = tracker_token.filter(|t| !t.is_empty()) {
body["tracker_token"] = serde_json::Value::String(tk);
}
let client = reqwest::Client::new();
let resp = client
@@ -71,6 +87,70 @@ pub async fn add_repository(
Ok(())
}
#[server]
pub async fn update_repository(
repo_id: String,
name: Option<String>,
default_branch: Option<String>,
auth_token: Option<String>,
auth_username: Option<String>,
tracker_type: Option<String>,
tracker_owner: Option<String>,
tracker_repo: Option<String>,
tracker_token: Option<String>,
scan_schedule: Option<String>,
) -> Result<(), ServerFnError> {
let state: super::server_state::ServerState =
dioxus_fullstack::FullstackContext::extract().await?;
let url = format!("{}/api/v1/repositories/{repo_id}", state.agent_api_url);
let mut body = serde_json::Map::new();
if let Some(v) = name.filter(|s| !s.is_empty()) {
body.insert("name".into(), serde_json::Value::String(v));
}
if let Some(v) = default_branch.filter(|s| !s.is_empty()) {
body.insert("default_branch".into(), serde_json::Value::String(v));
}
if let Some(v) = auth_token {
body.insert("auth_token".into(), serde_json::Value::String(v));
}
if let Some(v) = auth_username {
body.insert("auth_username".into(), serde_json::Value::String(v));
}
if let Some(v) = tracker_type {
body.insert("tracker_type".into(), serde_json::Value::String(v));
}
if let Some(v) = tracker_owner {
body.insert("tracker_owner".into(), serde_json::Value::String(v));
}
if let Some(v) = tracker_repo {
body.insert("tracker_repo".into(), serde_json::Value::String(v));
}
if let Some(v) = tracker_token {
body.insert("tracker_token".into(), serde_json::Value::String(v));
}
if let Some(v) = scan_schedule {
body.insert("scan_schedule".into(), serde_json::Value::String(v));
}
let client = reqwest::Client::new();
let resp = client
.patch(&url)
.json(&body)
.send()
.await
.map_err(|e| ServerFnError::new(e.to_string()))?;
if !resp.status().is_success() {
let text = resp.text().await.unwrap_or_default();
return Err(ServerFnError::new(format!(
"Failed to update repository: {text}"
)));
}
Ok(())
}
#[server]
pub async fn fetch_ssh_public_key() -> Result<String, ServerFnError> {
let state: super::server_state::ServerState =
@@ -136,6 +216,31 @@ pub async fn trigger_repo_scan(repo_id: String) -> Result<(), ServerFnError> {
Ok(())
}
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct WebhookConfigResponse {
pub webhook_secret: Option<String>,
pub tracker_type: String,
}
#[server]
pub async fn fetch_webhook_config(repo_id: String) -> Result<WebhookConfigResponse, ServerFnError> {
let state: super::server_state::ServerState =
dioxus_fullstack::FullstackContext::extract().await?;
let url = format!(
"{}/api/v1/repositories/{repo_id}/webhook-config",
state.agent_api_url
);
let resp = reqwest::get(&url)
.await
.map_err(|e| ServerFnError::new(e.to_string()))?;
let body: WebhookConfigResponse = resp
.json()
.await
.map_err(|e| ServerFnError::new(e.to_string()))?;
Ok(body)
}
/// Check if a repository has any running scans
#[server]
pub async fn check_repo_scanning(repo_id: String) -> Result<bool, ServerFnError> {

View File

@@ -1,4 +1,4 @@
use axum::routing::get;
use axum::routing::{get, post};
use axum::{middleware, Extension};
use dioxus::prelude::*;
use time::Duration;
@@ -63,6 +63,8 @@ pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
.route("/auth", get(auth_login))
.route("/auth/callback", get(auth_callback))
.route("/logout", get(logout))
// Webhook proxy: forward to agent (no auth required)
.route("/webhook/{platform}/{repo_id}", post(webhook_proxy))
.serve_dioxus_application(ServeConfig::new(), app)
.layer(Extension(PendingOAuthStore::default()))
.layer(middleware::from_fn(require_auth))
@@ -77,6 +79,53 @@ pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
})
}
/// Forward incoming webhooks to the agent's webhook server.
/// The dashboard acts as a public-facing proxy so the agent isn't exposed directly.
///
/// Only platform-specific webhook headers (Gitea/GitHub/GitLab) plus
/// `content-type` are forwarded; everything else is dropped. The agent's
/// response status is relayed back to the caller, with 502 on proxy failure.
async fn webhook_proxy(
    Extension(state): Extension<ServerState>,
    axum::extract::Path((platform, repo_id)): axum::extract::Path<(String, String)>,
    headers: axum::http::HeaderMap,
    body: axum::body::Bytes,
) -> axum::http::StatusCode {
    // agent_api_url typically looks like "http://agent:3001" or may carry an
    // /api or /api/v1 suffix; webhook routes live at the server root, so strip it.
    let trimmed = state.agent_api_url.trim_end_matches('/');
    let base = ["/api/v1", "/api"]
        .iter()
        .find_map(|suffix| trimmed.strip_suffix(suffix))
        .unwrap_or(trimmed);
    let agent_url = format!("{base}/webhook/{platform}/{repo_id}");

    let client = reqwest::Client::new();
    let mut forwarded = client.post(&agent_url).body(body.to_vec());
    for (name, value) in &headers {
        let lower = name.as_str().to_lowercase();
        // Forward only platform webhook headers and the content type.
        let relevant = lower == "content-type"
            || ["x-gitea-", "x-github-", "x-hub-", "x-gitlab-"]
                .iter()
                .any(|prefix| lower.starts_with(prefix));
        if relevant {
            if let Ok(text) = value.to_str() {
                forwarded = forwarded.header(name.as_str(), text);
            }
        }
    }

    match forwarded.send().await {
        Ok(resp) => axum::http::StatusCode::from_u16(resp.status().as_u16())
            .unwrap_or(axum::http::StatusCode::BAD_GATEWAY),
        Err(e) => {
            tracing::error!("Webhook proxy failed: {e}");
            axum::http::StatusCode::BAD_GATEWAY
        }
    }
}
/// Seed three default MCP server configs (Findings, SBOM, DAST) if they don't already exist.
async fn seed_default_mcp_servers(db: &Database, mcp_endpoint_url: Option<&str>) {
let endpoint = mcp_endpoint_url.unwrap_or("http://localhost:8090");

View File

@@ -1,5 +1,7 @@
use dioxus::prelude::*;
use dioxus_free_icons::icons::bs_icons::*;
#[allow(unused_imports)]
use dioxus_free_icons::icons::bs_icons::{BsGear, BsPencil};
use dioxus_free_icons::Icon;
use crate::components::page_header::PageHeader;
@@ -29,9 +31,24 @@ pub fn RepositoriesPage() -> Element {
let mut auth_username = use_signal(String::new);
let mut show_auth = use_signal(|| false);
let mut ssh_public_key = use_signal(String::new);
let mut show_tracker = use_signal(|| false);
let mut tracker_type_val = use_signal(String::new);
let mut tracker_owner_val = use_signal(String::new);
let mut tracker_repo_val = use_signal(String::new);
let mut tracker_token_val = use_signal(String::new);
let mut adding = use_signal(|| false);
let mut toasts = use_context::<Toasts>();
let mut confirm_delete = use_signal(|| Option::<(String, String)>::None); // (id, name)
let mut edit_repo_id = use_signal(|| Option::<String>::None);
let mut edit_name = use_signal(String::new);
let mut edit_branch = use_signal(String::new);
let mut edit_tracker_type = use_signal(String::new);
let mut edit_tracker_owner = use_signal(String::new);
let mut edit_tracker_repo = use_signal(String::new);
let mut edit_tracker_token = use_signal(String::new);
let mut edit_saving = use_signal(|| false);
let mut edit_webhook_secret = use_signal(|| Option::<String>::None);
let mut edit_webhook_tracker = use_signal(String::new);
let mut scanning_ids = use_signal(Vec::<String>::new);
let mut graph_repo_id = use_signal(|| Option::<String>::None);
@@ -154,6 +171,63 @@ pub fn RepositoriesPage() -> Element {
}
}
// Issue tracker config section
div { style: "margin-top: 8px;",
button {
class: "btn btn-ghost",
style: "font-size: 12px; padding: 4px 8px;",
onclick: move |_| show_tracker.toggle(),
if show_tracker() { "Hide tracker options" } else { "Issue tracker?" }
}
}
if show_tracker() {
div { class: "auth-section", style: "margin-top: 12px; padding: 12px; border: 1px solid var(--border-subtle); border-radius: 8px;",
p { style: "font-size: 12px; color: var(--text-secondary); margin-bottom: 8px;",
"Configure an issue tracker to auto-create issues from findings"
}
div { class: "form-group",
label { "Tracker Type" }
select {
value: "{tracker_type_val}",
onchange: move |e| tracker_type_val.set(e.value()),
option { value: "", "None" }
option { value: "github", "GitHub" }
option { value: "gitlab", "GitLab" }
option { value: "gitea", "Gitea" }
option { value: "jira", "Jira" }
}
}
div { class: "form-group",
label { "Owner / Namespace" }
input {
r#type: "text",
placeholder: "org-name",
value: "{tracker_owner_val}",
oninput: move |e| tracker_owner_val.set(e.value()),
}
}
div { class: "form-group",
label { "Repository / Project" }
input {
r#type: "text",
placeholder: "repo-name",
value: "{tracker_repo_val}",
oninput: move |e| tracker_repo_val.set(e.value()),
}
}
div { class: "form-group",
label { "Tracker Token (PAT)" }
input {
r#type: "password",
placeholder: "ghp_xxxx / glpat-xxxx",
value: "{tracker_token_val}",
oninput: move |e| tracker_token_val.set(e.value()),
}
}
}
}
button {
class: "btn btn-primary",
disabled: adding(),
@@ -169,9 +243,13 @@ pub fn RepositoriesPage() -> Element {
let v = auth_username();
if v.is_empty() { None } else { Some(v) }
};
let tt = { let v = tracker_type_val(); if v.is_empty() { None } else { Some(v) } };
let t_owner = { let v = tracker_owner_val(); if v.is_empty() { None } else { Some(v) } };
let t_repo = { let v = tracker_repo_val(); if v.is_empty() { None } else { Some(v) } };
let t_tok = { let v = tracker_token_val(); if v.is_empty() { None } else { Some(v) } };
adding.set(true);
spawn(async move {
match crate::infrastructure::repositories::add_repository(n, u, b, tok, usr).await {
match crate::infrastructure::repositories::add_repository(n, u, b, tok, usr, tt, t_owner, t_repo, t_tok).await {
Ok(_) => {
toasts.push(ToastType::Success, "Repository added");
repos.restart();
@@ -182,10 +260,15 @@ pub fn RepositoriesPage() -> Element {
});
show_add_form.set(false);
show_auth.set(false);
show_tracker.set(false);
name.set(String::new());
git_url.set(String::new());
auth_token.set(String::new());
auth_username.set(String::new());
tracker_type_val.set(String::new());
tracker_owner_val.set(String::new());
tracker_repo_val.set(String::new());
tracker_token_val.set(String::new());
},
if adding() { "Validating..." } else { "Add" }
}
@@ -234,6 +317,139 @@ pub fn RepositoriesPage() -> Element {
}
}
// ── Edit repository dialog ──
if let Some(eid) = edit_repo_id() {
div { class: "modal-overlay",
div { class: "modal-dialog",
h3 { "Edit Repository" }
div { class: "form-group",
label { "Name" }
input {
r#type: "text",
value: "{edit_name}",
oninput: move |e| edit_name.set(e.value()),
}
}
div { class: "form-group",
label { "Default Branch" }
input {
r#type: "text",
value: "{edit_branch}",
oninput: move |e| edit_branch.set(e.value()),
}
}
h4 { style: "margin-top: 16px; margin-bottom: 8px; font-size: 14px; color: var(--text-secondary);", "Issue Tracker" }
div { class: "form-group",
label { "Tracker Type" }
select {
value: "{edit_tracker_type}",
onchange: move |e| edit_tracker_type.set(e.value()),
option { value: "", "None" }
option { value: "github", "GitHub" }
option { value: "gitlab", "GitLab" }
option { value: "gitea", "Gitea" }
option { value: "jira", "Jira" }
}
}
div { class: "form-group",
label { "Owner / Namespace" }
input {
r#type: "text",
placeholder: "org-name",
value: "{edit_tracker_owner}",
oninput: move |e| edit_tracker_owner.set(e.value()),
}
}
div { class: "form-group",
label { "Repository / Project" }
input {
r#type: "text",
placeholder: "repo-name",
value: "{edit_tracker_repo}",
oninput: move |e| edit_tracker_repo.set(e.value()),
}
}
div { class: "form-group",
label { "Tracker Token (leave empty to keep existing)" }
input {
r#type: "password",
placeholder: "Enter new token to change",
value: "{edit_tracker_token}",
oninput: move |e| edit_tracker_token.set(e.value()),
}
}
// Webhook configuration section
if let Some(secret) = edit_webhook_secret() {
h4 {
style: "margin-top: 16px; margin-bottom: 8px; font-size: 14px; color: var(--text-secondary);",
"Webhook Configuration"
}
p {
style: "font-size: 12px; color: var(--text-secondary); margin-bottom: 8px;",
"Add this webhook in your repository settings to enable push-triggered scans and PR reviews."
}
div { class: "form-group",
label { "Webhook URL" }
input {
r#type: "text",
readonly: true,
style: "font-family: monospace; font-size: 12px;",
value: format!("/webhook/{}/{eid}", edit_webhook_tracker()),
}
p {
style: "font-size: 11px; color: var(--text-secondary); margin-top: 4px;",
"Use the full dashboard URL as the base, e.g. https://your-domain.com/webhook/..."
}
}
div { class: "form-group",
label { "Webhook Secret" }
input {
r#type: "text",
readonly: true,
style: "font-family: monospace; font-size: 12px;",
value: "{secret}",
}
}
}
div { class: "modal-actions",
button {
class: "btn btn-secondary",
onclick: move |_| edit_repo_id.set(None),
"Cancel"
}
button {
class: "btn btn-primary",
disabled: edit_saving(),
onclick: move |_| {
let id = eid.clone();
let nm = { let v = edit_name(); if v.is_empty() { None } else { Some(v) } };
let br = { let v = edit_branch(); if v.is_empty() { None } else { Some(v) } };
let tt = { let v = edit_tracker_type(); if v.is_empty() { None } else { Some(v) } };
let t_owner = { let v = edit_tracker_owner(); if v.is_empty() { None } else { Some(v) } };
let t_repo = { let v = edit_tracker_repo(); if v.is_empty() { None } else { Some(v) } };
let t_tok = { let v = edit_tracker_token(); if v.is_empty() { None } else { Some(v) } };
edit_saving.set(true);
spawn(async move {
match crate::infrastructure::repositories::update_repository(
id, nm, br, None, None, tt, t_owner, t_repo, t_tok, None,
).await {
Ok(_) => {
toasts.push(ToastType::Success, "Repository updated");
repos.restart();
}
Err(e) => toasts.push(ToastType::Error, e.to_string()),
}
edit_saving.set(false);
edit_repo_id.set(None);
});
},
if edit_saving() { "Saving..." } else { "Save" }
}
}
}
}
}
match &*repos.read() {
Some(Some(resp)) => {
let total_pages = resp.total.unwrap_or(0).div_ceil(20).max(1);
@@ -257,7 +473,9 @@ pub fn RepositoriesPage() -> Element {
let repo_id = repo.id.as_ref().map(|id| id.to_hex()).unwrap_or_default();
let repo_id_scan = repo_id.clone();
let repo_id_del = repo_id.clone();
let repo_id_edit = repo_id.clone();
let repo_name_del = repo.name.clone();
let edit_repo_data = repo.clone();
let is_scanning = scanning_ids().contains(&repo_id);
rsx! {
tr {
@@ -302,6 +520,32 @@ pub fn RepositoriesPage() -> Element {
},
Icon { icon: BsDiagram3, width: 16, height: 16 }
}
button {
class: "btn btn-ghost",
title: "Edit repository",
onclick: move |_| {
edit_name.set(edit_repo_data.name.clone());
edit_branch.set(edit_repo_data.default_branch.clone());
edit_tracker_type.set(
edit_repo_data.tracker_type.as_ref().map(|t| t.to_string()).unwrap_or_default()
);
edit_tracker_owner.set(edit_repo_data.tracker_owner.clone().unwrap_or_default());
edit_tracker_repo.set(edit_repo_data.tracker_repo.clone().unwrap_or_default());
edit_tracker_token.set(String::new());
edit_webhook_secret.set(None);
edit_webhook_tracker.set(String::new());
edit_repo_id.set(Some(repo_id_edit.clone()));
// Fetch webhook config in background
let rid = repo_id_edit.clone();
spawn(async move {
if let Ok(cfg) = crate::infrastructure::repositories::fetch_webhook_config(rid).await {
edit_webhook_secret.set(cfg.webhook_secret);
edit_webhook_tracker.set(cfg.tracker_type);
}
});
},
Icon { icon: BsPencil, width: 16, height: 16 }
}
button {
class: if is_scanning { "btn btn-ghost btn-scanning" } else { "btn btn-ghost" },
title: "Trigger scan",

View File

@@ -677,23 +677,32 @@ fn license_type_class(is_copyleft: bool) -> &'static str {
#[cfg(feature = "web")]
fn trigger_download(content: &str, filename: &str) {
use wasm_bindgen::JsCast;
let window = web_sys::window().expect("no window");
let document = window.document().expect("no document");
let Some(window) = web_sys::window() else {
return;
};
let Some(document) = window.document() else {
return;
};
let blob_parts = js_sys::Array::new();
blob_parts.push(&wasm_bindgen::JsValue::from_str(content));
let mut opts = web_sys::BlobPropertyBag::new();
opts.type_("application/json");
let blob = web_sys::Blob::new_with_str_sequence_and_options(&blob_parts, &opts).expect("blob");
let opts = web_sys::BlobPropertyBag::new();
opts.set_type("application/json");
let Ok(blob) = web_sys::Blob::new_with_str_sequence_and_options(&blob_parts, &opts) else {
return;
};
let url = web_sys::Url::create_object_url_with_blob(&blob).expect("object url");
let Ok(url) = web_sys::Url::create_object_url_with_blob(&blob) else {
return;
};
let a: web_sys::HtmlAnchorElement = document
.create_element("a")
.expect("create a")
.dyn_into()
.expect("cast");
let Ok(el) = document.create_element("a") else {
return;
};
let Ok(a) = el.dyn_into::<web_sys::HtmlAnchorElement>() else {
return;
};
a.set_href(&url);
a.set_download(filename);
a.click();

View File

@@ -3,7 +3,7 @@ use std::path::Path;
use compliance_core::error::CoreError;
use compliance_core::traits::graph_builder::{LanguageParser, ParseOutput};
use tracing::info;
use tracing::{debug, info};
use super::javascript::JavaScriptParser;
use super::python::PythonParser;
@@ -65,7 +65,7 @@ impl ParserRegistry {
};
let parser = &self.parsers[parser_idx];
info!(
debug!(
file = %file_path.display(),
language = parser.language(),
"Parsing file"