Initial commit: Compliance Scanner Agent
Autonomous security and compliance scanning agent for git repositories. Features: SAST (Semgrep), SBOM (Syft), CVE monitoring (OSV.dev/NVD), GDPR/OAuth pattern detection, LLM triage, issue creation (GitHub/GitLab/Jira), PR reviews, and Dioxus fullstack dashboard. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
39
.env.example
Normal file
39
.env.example
Normal file
@@ -0,0 +1,39 @@
|
||||
# MongoDB
|
||||
MONGODB_URI=mongodb://root:example@localhost:27017/compliance_scanner?authSource=admin
|
||||
MONGODB_DATABASE=compliance_scanner
|
||||
|
||||
# LiteLLM
|
||||
LITELLM_URL=http://localhost:4000
|
||||
LITELLM_API_KEY=
|
||||
LITELLM_MODEL=gpt-4o
|
||||
|
||||
# GitHub
|
||||
GITHUB_TOKEN=
|
||||
GITHUB_WEBHOOK_SECRET=
|
||||
|
||||
# GitLab
|
||||
GITLAB_URL=https://gitlab.com
|
||||
GITLAB_TOKEN=
|
||||
GITLAB_WEBHOOK_SECRET=
|
||||
|
||||
# Jira
|
||||
JIRA_URL=https://your-org.atlassian.net
|
||||
JIRA_EMAIL=
|
||||
JIRA_API_TOKEN=
|
||||
JIRA_PROJECT_KEY=
|
||||
|
||||
# SearXNG
|
||||
SEARXNG_URL=http://localhost:8888
|
||||
|
||||
# NVD
|
||||
NVD_API_KEY=
|
||||
|
||||
# Agent
|
||||
AGENT_PORT=3001
|
||||
SCAN_SCHEDULE=0 0 */6 * * *
|
||||
CVE_MONITOR_SCHEDULE=0 0 0 * * *
|
||||
GIT_CLONE_BASE_PATH=/tmp/compliance-scanner/repos
|
||||
|
||||
# Dashboard
|
||||
DASHBOARD_PORT=8080
|
||||
AGENT_API_URL=http://localhost:3001
|
||||
6
.gitignore
vendored
Normal file
6
.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/target
|
||||
.env
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
5308
Cargo.lock
generated
Normal file
5308
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
24
Cargo.toml
Normal file
24
Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[workspace]
|
||||
members = ["compliance-core", "compliance-agent", "compliance-dashboard"]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.lints.clippy]
|
||||
unwrap_used = "deny"
|
||||
expect_used = "deny"
|
||||
|
||||
[workspace.dependencies]
|
||||
compliance-core = { path = "compliance-core" }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
mongodb = { version = "3", features = ["rustls-tls", "compat-3-0-0"] }
|
||||
reqwest = { version = "0.12", features = ["json", "rustls-tls"], default-features = false }
|
||||
thiserror = "2"
|
||||
sha2 = "0.10"
|
||||
hex = "0.4"
|
||||
uuid = { version = "1", features = ["v4", "serde"] }
|
||||
secrecy = { version = "0.10", features = ["serde"] }
|
||||
regex = "1"
|
||||
11
Dioxus.toml
Normal file
11
Dioxus.toml
Normal file
@@ -0,0 +1,11 @@
|
||||
[application]
|
||||
name = "compliance-dashboard"
|
||||
default_platform = "web"
|
||||
asset_dir = "assets"
|
||||
|
||||
[web.app]
|
||||
title = "Compliance Scanner Dashboard"
|
||||
|
||||
[web.watcher]
|
||||
reload_html = true
|
||||
watch_path = ["compliance-dashboard/src", "assets"]
|
||||
14
Dockerfile.agent
Normal file
14
Dockerfile.agent
Normal file
@@ -0,0 +1,14 @@
|
||||
FROM rust:1.89-bookworm AS builder
|
||||
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN cargo build --release -p compliance-agent
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
RUN apt-get update && apt-get install -y ca-certificates libssl3 git && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /app/target/release/compliance-agent /usr/local/bin/compliance-agent
|
||||
|
||||
EXPOSE 3001 3002
|
||||
|
||||
ENTRYPOINT ["compliance-agent"]
|
||||
18
Dockerfile.dashboard
Normal file
18
Dockerfile.dashboard
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM rust:1.89-bookworm AS builder
|
||||
|
||||
RUN cargo install dioxus-cli --version 0.7.3
|
||||
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
RUN dx build --release --features server --platform web
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
RUN apt-get update && apt-get install -y ca-certificates libssl3 && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /app/target/release/compliance-dashboard /usr/local/bin/compliance-dashboard
|
||||
COPY --from=builder /app/target/dx/compliance-dashboard/release/web/public /app/public
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["compliance-dashboard"]
|
||||
205
README.md
Normal file
205
README.md
Normal file
@@ -0,0 +1,205 @@
|
||||
<p align="center">
|
||||
<img src="assets/favicon.svg" width="96" height="96" alt="Compliance Scanner Logo" />
|
||||
</p>
|
||||
|
||||
<h1 align="center">Compliance Scanner</h1>
|
||||
|
||||
<p align="center">
|
||||
<strong>Autonomous security and compliance scanning agent for git repositories</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.rust-lang.org/"><img src="https://img.shields.io/badge/Rust-1.89-orange?logo=rust&logoColor=white" alt="Rust" /></a>
|
||||
<a href="https://dioxuslabs.com/"><img src="https://img.shields.io/badge/Dioxus-0.7-blue?logo=webassembly&logoColor=white" alt="Dioxus" /></a>
|
||||
<a href="https://www.mongodb.com/"><img src="https://img.shields.io/badge/MongoDB-8.0-47A248?logo=mongodb&logoColor=white" alt="MongoDB" /></a>
|
||||
<a href="https://axum.rs/"><img src="https://img.shields.io/badge/Axum-0.8-4A4A55?logo=rust&logoColor=white" alt="Axum" /></a>
|
||||
<a href="https://tailwindcss.com/"><img src="https://img.shields.io/badge/Tailwind_CSS-4-06B6D4?logo=tailwindcss&logoColor=white" alt="Tailwind CSS" /></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="https://img.shields.io/badge/GDPR-Scanning-green" alt="GDPR" />
|
||||
<img src="https://img.shields.io/badge/OAuth-Scanning-green" alt="OAuth" />
|
||||
<img src="https://img.shields.io/badge/SAST-Semgrep-blue" alt="SAST" />
|
||||
<img src="https://img.shields.io/badge/CVE-OSV.dev%20%2B%20NVD-orange" alt="CVE" />
|
||||
<img src="https://img.shields.io/badge/Platform-Linux%20%7C%20Docker-lightgrey?logo=linux&logoColor=white" alt="Platform" />
|
||||
</p>
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
Compliance Scanner is an autonomous agent that continuously monitors git repositories for security vulnerabilities, GDPR/OAuth compliance patterns, and dependency risks. It creates issues in external trackers (GitHub/GitLab/Jira) with evidence and remediation suggestions, reviews pull requests, and exposes a Dioxus-based dashboard for visualization.
|
||||
|
||||
> **How it works:** The agent runs as a lazy daemon -- it only scans when new commits are detected, triggered by cron schedules or webhooks. LLM-powered triage filters out false positives and generates actionable remediation.
|
||||
|
||||
## Features
|
||||
|
||||
| Area | Capabilities |
|
||||
|------|-------------|
|
||||
| **SAST Scanning** | Semgrep-based static analysis with auto-config rules |
|
||||
| **SBOM Generation** | Syft + cargo-audit for complete dependency inventory |
|
||||
| **CVE Monitoring** | OSV.dev batch queries, NVD CVSS enrichment, SearXNG context |
|
||||
| **GDPR Patterns** | Detect PII logging, missing consent, hardcoded retention, missing deletion |
|
||||
| **OAuth Patterns** | Detect implicit grant, missing PKCE, token in localStorage, token in URLs |
|
||||
| **LLM Triage** | Confidence scoring via LiteLLM to filter false positives |
|
||||
| **Issue Creation** | Auto-create issues in GitHub, GitLab, or Jira with code evidence |
|
||||
| **PR Reviews** | Post security review comments on pull requests |
|
||||
| **Dashboard** | Fullstack Dioxus UI with findings, SBOM, issues, and statistics |
|
||||
| **Webhooks** | GitHub (HMAC-SHA256) and GitLab webhook receivers for push/PR events |
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ Cargo Workspace │
|
||||
├──────────────┬──────────────────┬───────────────────────────┤
|
||||
│ compliance- │ compliance- │ compliance- │
|
||||
│ core │ agent │ dashboard │
|
||||
│ (lib) │ (bin) │ (bin, Dioxus 0.7.3) │
|
||||
│ │ │ │
|
||||
│ Models │ Scan Pipeline │ Fullstack Web UI │
|
||||
│ Traits │ LLM Client │ Server Functions │
|
||||
│ Config │ Issue Trackers │ Charts + Tables │
|
||||
│ Errors │ Scheduler │ Settings Page │
|
||||
│ │ REST API │ │
|
||||
│ │ Webhooks │ │
|
||||
└──────────────┴──────────────────┴───────────────────────────┘
|
||||
│
|
||||
MongoDB (shared)
|
||||
```
|
||||
|
||||
## Scan Pipeline (7 Stages)
|
||||
|
||||
1. **Change Detection** -- `git2` fetch, compare HEAD SHA with last scanned commit
|
||||
2. **Semgrep SAST** -- CLI wrapper with JSON output parsing
|
||||
3. **SBOM Generation** -- Syft (CycloneDX) + cargo-audit vulnerability merge
|
||||
4. **CVE Scanning** -- OSV.dev batch + NVD CVSS enrichment + SearXNG context
|
||||
5. **Pattern Scanning** -- Regex-based GDPR and OAuth compliance checks
|
||||
6. **LLM Triage** -- LiteLLM confidence scoring, filter findings < 3/10
|
||||
7. **Issue Creation** -- Dedup via SHA-256 fingerprint, create tracker issues
|
||||
|
||||
## Tech Stack
|
||||
|
||||
| Layer | Technology |
|
||||
|-------|-----------|
|
||||
| Shared Library | `compliance-core` -- models, traits, config |
|
||||
| Agent | Axum REST API, git2, tokio-cron-scheduler, Semgrep, Syft |
|
||||
| Dashboard | Dioxus 0.7.3 fullstack, Tailwind CSS |
|
||||
| Database | MongoDB with typed collections |
|
||||
| LLM | LiteLLM (OpenAI-compatible API) |
|
||||
| Issue Trackers | GitHub (octocrab), GitLab (REST v4), Jira (REST v3) |
|
||||
| CVE Sources | OSV.dev, NVD, SearXNG |
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Rust 1.89+
|
||||
- [Dioxus CLI](https://dioxuslabs.com/learn/0.7/getting_started) (`dx`)
|
||||
- MongoDB
|
||||
- Docker & Docker Compose (optional)
|
||||
|
||||
### Optional External Tools
|
||||
|
||||
- [Semgrep](https://semgrep.dev/) -- for SAST scanning
|
||||
- [Syft](https://github.com/anchore/syft) -- for SBOM generation
|
||||
- [cargo-audit](https://github.com/rustsec/rustsec) -- for Rust dependency auditing
|
||||
|
||||
### Setup
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repo-url>
|
||||
cd compliance-scanner
|
||||
|
||||
# Start MongoDB + SearXNG
|
||||
docker compose up -d mongo searxng
|
||||
|
||||
# Configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env with your LiteLLM, tracker tokens, and MongoDB settings
|
||||
|
||||
# Run the agent
|
||||
cargo run -p compliance-agent
|
||||
|
||||
# Run the dashboard (separate terminal)
|
||||
dx serve --features server --platform web
|
||||
```
|
||||
|
||||
### Docker Compose (Full Stack)
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
This starts MongoDB, SearXNG, the agent (port 3001), and the dashboard (port 8080).
|
||||
|
||||
## REST API
|
||||
|
||||
The agent exposes a REST API on port 3001:
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
|--------|----------|-------------|
|
||||
| `GET` | `/api/v1/health` | Health check |
|
||||
| `GET` | `/api/v1/stats/overview` | Summary statistics and trends |
|
||||
| `GET` | `/api/v1/repositories` | List tracked repositories |
|
||||
| `POST` | `/api/v1/repositories` | Add a repository to track |
|
||||
| `POST` | `/api/v1/repositories/:id/scan` | Trigger a manual scan |
|
||||
| `GET` | `/api/v1/findings` | List findings (filterable) |
|
||||
| `GET` | `/api/v1/findings/:id` | Get finding with code evidence |
|
||||
| `PATCH` | `/api/v1/findings/:id/status` | Update finding status |
|
||||
| `GET` | `/api/v1/sbom` | List dependencies |
|
||||
| `GET` | `/api/v1/issues` | List cross-tracker issues |
|
||||
| `GET` | `/api/v1/scan-runs` | Scan execution history |
|
||||
| `POST` | `/webhook/github` | GitHub webhook (HMAC-SHA256) |
|
||||
| `POST` | `/webhook/gitlab` | GitLab webhook (token verify) |
|
||||
|
||||
## Dashboard Pages
|
||||
|
||||
| Page | Description |
|
||||
|------|-------------|
|
||||
| **Overview** | Stat cards, severity distribution chart |
|
||||
| **Repositories** | Add/manage tracked repos, trigger scans |
|
||||
| **Findings** | Filterable table by severity, type, status |
|
||||
| **Finding Detail** | Code evidence, remediation, suggested fix, linked issue |
|
||||
| **SBOM** | Dependency inventory with vulnerability badges |
|
||||
| **Issues** | Cross-tracker view (GitHub + GitLab + Jira) |
|
||||
| **Settings** | Configure LiteLLM, tracker tokens, SearXNG URL |
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
compliance-scanner/
|
||||
├── compliance-core/ Shared library (models, traits, config, errors)
|
||||
├── compliance-agent/ Agent daemon (pipeline, LLM, trackers, API, webhooks)
|
||||
│ └── src/
|
||||
│ ├── pipeline/ 7-stage scan pipeline
|
||||
│ ├── llm/ LiteLLM client, triage, descriptions, fixes, PR review
|
||||
│ ├── trackers/ GitHub, GitLab, Jira integrations
|
||||
│ ├── api/ REST API (Axum)
|
||||
│ └── webhooks/ GitHub + GitLab webhook receivers
|
||||
├── compliance-dashboard/ Dioxus fullstack dashboard
|
||||
│ └── src/
|
||||
│ ├── components/ Reusable UI components
|
||||
│ ├── infrastructure/ Server functions, DB, config
|
||||
│ └── pages/ Full page views
|
||||
├── assets/ Static assets (CSS, icons)
|
||||
├── styles/ Tailwind input stylesheet
|
||||
└── bin/ Dashboard binary entrypoint
|
||||
```
|
||||
|
||||
## External Services
|
||||
|
||||
| Service | Purpose | Default URL |
|
||||
|---------|---------|-------------|
|
||||
| MongoDB | Persistence | `mongodb://localhost:27017` |
|
||||
| LiteLLM | LLM proxy for triage and generation | `http://localhost:4000` |
|
||||
| SearXNG | CVE context search | `http://localhost:8888` |
|
||||
| Semgrep | SAST scanning | CLI tool |
|
||||
| Syft | SBOM generation | CLI tool |
|
||||
|
||||
---
|
||||
|
||||
<p align="center">
|
||||
<sub>Built with Rust, Dioxus, and a commitment to automated security compliance.</sub>
|
||||
</p>
|
||||
28
assets/favicon.svg
Normal file
28
assets/favicon.svg
Normal file
@@ -0,0 +1,28 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 96 96">
|
||||
<defs>
|
||||
<linearGradient id="bg" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" stop-color="#0f172a"/>
|
||||
<stop offset="100%" stop-color="#1e293b"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="shield" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" stop-color="#38bdf8"/>
|
||||
<stop offset="100%" stop-color="#818cf8"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="96" height="96" rx="18" fill="url(#bg)"/>
|
||||
<!-- Shield outline -->
|
||||
<path d="M48 14 L28 26 L28 48 C28 62 37 74 48 78 C59 74 68 62 68 48 L68 26 Z"
|
||||
fill="none" stroke="url(#shield)" stroke-width="3" stroke-linejoin="round"/>
|
||||
<!-- Inner shield fill (subtle) -->
|
||||
<path d="M48 18 L31 28.5 L31 47 C31 59.5 39 70 48 74 C57 70 65 59.5 65 47 L65 28.5 Z"
|
||||
fill="url(#shield)" opacity="0.1"/>
|
||||
<!-- Magnifying glass -->
|
||||
<circle cx="45" cy="44" r="10" fill="none" stroke="#38bdf8" stroke-width="2.5"/>
|
||||
<line x1="52" y1="51" x2="60" y2="59" stroke="#38bdf8" stroke-width="2.5" stroke-linecap="round"/>
|
||||
<!-- Checkmark inside magnifier -->
|
||||
<path d="M40 44 L43.5 47.5 L50 41" fill="none" stroke="#22c55e" stroke-width="2.5"
|
||||
stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<!-- Scan lines (decorative) -->
|
||||
<line x1="34" y1="32" x2="46" y2="32" stroke="#38bdf8" stroke-width="1.5" opacity="0.4" stroke-linecap="round"/>
|
||||
<line x1="34" y1="36" x2="42" y2="36" stroke="#38bdf8" stroke-width="1.5" opacity="0.3" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
315
assets/main.css
Normal file
315
assets/main.css
Normal file
@@ -0,0 +1,315 @@
|
||||
:root {
|
||||
--sidebar-width: 260px;
|
||||
--header-height: 56px;
|
||||
--bg-primary: #0f172a;
|
||||
--bg-secondary: #1e293b;
|
||||
--bg-card: #1e293b;
|
||||
--text-primary: #f1f5f9;
|
||||
--text-secondary: #94a3b8;
|
||||
--accent: #38bdf8;
|
||||
--accent-hover: #7dd3fc;
|
||||
--border: #334155;
|
||||
--danger: #ef4444;
|
||||
--warning: #f59e0b;
|
||||
--success: #22c55e;
|
||||
--info: #3b82f6;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: var(--bg-primary);
|
||||
color: var(--text-primary);
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.app-shell {
|
||||
display: flex;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
width: var(--sidebar-width);
|
||||
background: var(--bg-secondary);
|
||||
border-right: 1px solid var(--border);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
z-index: 40;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
padding: 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.sidebar-header h1 {
|
||||
font-size: 16px;
|
||||
font-weight: 700;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.sidebar-nav {
|
||||
padding: 12px 8px;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 10px 12px;
|
||||
border-radius: 8px;
|
||||
color: var(--text-secondary);
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
transition: all 0.15s;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.nav-item:hover {
|
||||
background: rgba(56, 189, 248, 0.1);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.nav-item.active {
|
||||
background: rgba(56, 189, 248, 0.15);
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.main-content {
|
||||
margin-left: var(--sidebar-width);
|
||||
flex: 1;
|
||||
padding: 24px 32px;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.page-header {
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
.page-header h2 {
|
||||
font-size: 24px;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.page-header p {
|
||||
color: var(--text-secondary);
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.stat-cards {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
||||
gap: 16px;
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
.stat-card {
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.stat-card .label {
|
||||
font-size: 12px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: var(--text-secondary);
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.stat-card .value {
|
||||
font-size: 28px;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.card {
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 20px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.card-header {
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
margin-bottom: 16px;
|
||||
padding-bottom: 12px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.table-wrapper {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: left;
|
||||
padding: 12px 16px;
|
||||
font-size: 12px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: var(--text-secondary);
|
||||
border-bottom: 1px solid var(--border);
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
td {
|
||||
padding: 12px 16px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
tr:hover {
|
||||
background: rgba(56, 189, 248, 0.05);
|
||||
}
|
||||
|
||||
.badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 2px 10px;
|
||||
border-radius: 9999px;
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.badge-critical { background: rgba(239, 68, 68, 0.2); color: #fca5a5; }
|
||||
.badge-high { background: rgba(249, 115, 22, 0.2); color: #fdba74; }
|
||||
.badge-medium { background: rgba(245, 158, 11, 0.2); color: #fcd34d; }
|
||||
.badge-low { background: rgba(34, 197, 94, 0.2); color: #86efac; }
|
||||
.badge-info { background: rgba(59, 130, 246, 0.2); color: #93c5fd; }
|
||||
|
||||
.btn {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 8px 16px;
|
||||
border-radius: 8px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.btn-primary {
|
||||
background: var(--accent);
|
||||
color: #0f172a;
|
||||
}
|
||||
|
||||
.btn-primary:hover {
|
||||
background: var(--accent-hover);
|
||||
}
|
||||
|
||||
.btn-ghost {
|
||||
background: transparent;
|
||||
color: var(--text-secondary);
|
||||
border: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.btn-ghost:hover {
|
||||
color: var(--text-primary);
|
||||
border-color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.code-block {
|
||||
background: #0d1117;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 16px;
|
||||
font-family: "JetBrains Mono", "Fira Code", monospace;
|
||||
font-size: 13px;
|
||||
line-height: 1.6;
|
||||
overflow-x: auto;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.pagination {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
margin-top: 16px;
|
||||
}
|
||||
|
||||
.filter-bar {
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
margin-bottom: 16px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.filter-bar select,
|
||||
.filter-bar input {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 8px 12px;
|
||||
color: var(--text-primary);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.form-group label {
|
||||
display: block;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
margin-bottom: 6px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.form-group input,
|
||||
.form-group select {
|
||||
width: 100%;
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 10px 14px;
|
||||
color: var(--text-primary);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 40px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.sidebar {
|
||||
transform: translateX(-100%);
|
||||
transition: transform 0.3s;
|
||||
}
|
||||
.sidebar.open {
|
||||
transform: translateX(0);
|
||||
}
|
||||
.main-content {
|
||||
margin-left: 0;
|
||||
padding: 16px;
|
||||
}
|
||||
}
|
||||
1
assets/tailwind.css
Normal file
1
assets/tailwind.css
Normal file
@@ -0,0 +1 @@
|
||||
/* Placeholder - generated by build.rs via bunx @tailwindcss/cli */
|
||||
23
bin/main.rs
Normal file
23
bin/main.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
#[allow(clippy::expect_used)]
|
||||
fn main() {
|
||||
dioxus_logger::init(tracing::Level::DEBUG).expect("Failed to init logger");
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
{
|
||||
dioxus::web::launch::launch_cfg(
|
||||
compliance_dashboard::App,
|
||||
dioxus::web::Config::new().hydrate(true),
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
{
|
||||
compliance_dashboard::infrastructure::server_start(compliance_dashboard::App)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Unable to start server: {e}");
|
||||
})
|
||||
.expect("Server start failed")
|
||||
}
|
||||
}
|
||||
24
build.rs
Normal file
24
build.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
use std::process::Command;
|
||||
println!("cargo:rerun-if-changed=./styles/input.css");
|
||||
|
||||
match Command::new("bunx")
|
||||
.args([
|
||||
"@tailwindcss/cli",
|
||||
"-i",
|
||||
"./styles/input.css",
|
||||
"-o",
|
||||
"./assets/tailwind.css",
|
||||
])
|
||||
.status()
|
||||
{
|
||||
Ok(status) if !status.success() => {
|
||||
println!("cargo:warning=tailwind build exited with {status}, skipping CSS generation");
|
||||
}
|
||||
Err(e) => {
|
||||
println!("cargo:warning=bunx not found ({e}), skipping tailwind CSS generation");
|
||||
}
|
||||
Ok(_) => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
1
clippy.toml
Normal file
1
clippy.toml
Normal file
@@ -0,0 +1 @@
|
||||
avoid-breaking-exported-api = false
|
||||
35
compliance-agent/Cargo.toml
Normal file
35
compliance-agent/Cargo.toml
Normal file
@@ -0,0 +1,35 @@
|
||||
[package]
|
||||
name = "compliance-agent"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
compliance-core = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
mongodb = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
secrecy = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
axum = "0.8"
|
||||
tower-http = { version = "0.6", features = ["cors", "trace"] }
|
||||
git2 = "0.20"
|
||||
octocrab = "0.44"
|
||||
tokio-cron-scheduler = "0.13"
|
||||
dotenvy = "0.15"
|
||||
hmac = "0.12"
|
||||
walkdir = "2"
|
||||
base64 = "0.22"
|
||||
urlencoding = "2"
|
||||
futures-util = "0.3"
|
||||
45
compliance-agent/src/agent.rs
Normal file
45
compliance-agent/src/agent.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::AgentConfig;
|
||||
|
||||
use crate::database::Database;
|
||||
use crate::llm::LlmClient;
|
||||
use crate::pipeline::orchestrator::PipelineOrchestrator;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ComplianceAgent {
|
||||
pub config: AgentConfig,
|
||||
pub db: Database,
|
||||
pub llm: Arc<LlmClient>,
|
||||
pub http: reqwest::Client,
|
||||
}
|
||||
|
||||
impl ComplianceAgent {
|
||||
pub fn new(config: AgentConfig, db: Database) -> Self {
|
||||
let llm = Arc::new(LlmClient::new(
|
||||
config.litellm_url.clone(),
|
||||
config.litellm_api_key.clone(),
|
||||
config.litellm_model.clone(),
|
||||
));
|
||||
Self {
|
||||
config,
|
||||
db,
|
||||
llm,
|
||||
http: reqwest::Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run_scan(
|
||||
&self,
|
||||
repo_id: &str,
|
||||
trigger: compliance_core::models::ScanTrigger,
|
||||
) -> Result<(), crate::error::AgentError> {
|
||||
let orchestrator = PipelineOrchestrator::new(
|
||||
self.config.clone(),
|
||||
self.db.clone(),
|
||||
self.llm.clone(),
|
||||
self.http.clone(),
|
||||
);
|
||||
orchestrator.run(repo_id, trigger).await
|
||||
}
|
||||
}
|
||||
334
compliance-agent/src/api/handlers/mod.rs
Normal file
334
compliance-agent/src/api/handlers/mod.rs
Normal file
@@ -0,0 +1,334 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use axum::extract::{Extension, Path, Query};
|
||||
use axum::http::StatusCode;
|
||||
use axum::Json;
|
||||
use mongodb::bson::doc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::*;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct PaginationParams {
|
||||
#[serde(default = "default_page")]
|
||||
pub page: u64,
|
||||
#[serde(default = "default_limit")]
|
||||
pub limit: i64,
|
||||
}
|
||||
|
||||
fn default_page() -> u64 { 1 }
|
||||
fn default_limit() -> i64 { 50 }
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FindingsFilter {
|
||||
#[serde(default)]
|
||||
pub repo_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub severity: Option<String>,
|
||||
#[serde(default)]
|
||||
pub scan_type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub status: Option<String>,
|
||||
#[serde(default = "default_page")]
|
||||
pub page: u64,
|
||||
#[serde(default = "default_limit")]
|
||||
pub limit: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ApiResponse<T: Serialize> {
|
||||
pub data: T,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub total: Option<u64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct OverviewStats {
|
||||
pub total_repositories: u64,
|
||||
pub total_findings: u64,
|
||||
pub critical_findings: u64,
|
||||
pub high_findings: u64,
|
||||
pub medium_findings: u64,
|
||||
pub low_findings: u64,
|
||||
pub total_sbom_entries: u64,
|
||||
pub total_cve_alerts: u64,
|
||||
pub total_issues: u64,
|
||||
pub recent_scans: Vec<ScanRun>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct AddRepositoryRequest {
|
||||
pub name: String,
|
||||
pub git_url: String,
|
||||
#[serde(default = "default_branch")]
|
||||
pub default_branch: String,
|
||||
pub tracker_type: Option<TrackerType>,
|
||||
pub tracker_owner: Option<String>,
|
||||
pub tracker_repo: Option<String>,
|
||||
pub scan_schedule: Option<String>,
|
||||
}
|
||||
|
||||
fn default_branch() -> String { "main".to_string() }
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct UpdateStatusRequest {
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
type AgentExt = Extension<Arc<ComplianceAgent>>;
|
||||
type ApiResult<T> = Result<Json<ApiResponse<T>>, StatusCode>;
|
||||
|
||||
pub async fn health() -> Json<serde_json::Value> {
|
||||
Json(serde_json::json!({ "status": "ok" }))
|
||||
}
|
||||
|
||||
pub async fn stats_overview(Extension(agent): AgentExt) -> ApiResult<OverviewStats> {
|
||||
let db = &agent.db;
|
||||
|
||||
let total_repositories = db.repositories().count_documents(doc! {}).await.unwrap_or(0);
|
||||
let total_findings = db.findings().count_documents(doc! {}).await.unwrap_or(0);
|
||||
let critical_findings = db.findings().count_documents(doc! { "severity": "critical" }).await.unwrap_or(0);
|
||||
let high_findings = db.findings().count_documents(doc! { "severity": "high" }).await.unwrap_or(0);
|
||||
let medium_findings = db.findings().count_documents(doc! { "severity": "medium" }).await.unwrap_or(0);
|
||||
let low_findings = db.findings().count_documents(doc! { "severity": "low" }).await.unwrap_or(0);
|
||||
let total_sbom_entries = db.sbom_entries().count_documents(doc! {}).await.unwrap_or(0);
|
||||
let total_cve_alerts = db.cve_alerts().count_documents(doc! {}).await.unwrap_or(0);
|
||||
let total_issues = db.tracker_issues().count_documents(doc! {}).await.unwrap_or(0);
|
||||
|
||||
let recent_scans: Vec<ScanRun> = match db
|
||||
.scan_runs()
|
||||
.find(doc! {})
|
||||
.sort(doc! { "started_at": -1 })
|
||||
.limit(10)
|
||||
.await
|
||||
{
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: OverviewStats {
|
||||
total_repositories,
|
||||
total_findings,
|
||||
critical_findings,
|
||||
high_findings,
|
||||
medium_findings,
|
||||
low_findings,
|
||||
total_sbom_entries,
|
||||
total_cve_alerts,
|
||||
total_issues,
|
||||
recent_scans,
|
||||
},
|
||||
total: None,
|
||||
page: None,
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn list_repositories(
|
||||
Extension(agent): AgentExt,
|
||||
Query(params): Query<PaginationParams>,
|
||||
) -> ApiResult<Vec<TrackedRepository>> {
|
||||
let db = &agent.db;
|
||||
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
||||
let total = db.repositories().count_documents(doc! {}).await.unwrap_or(0);
|
||||
|
||||
let repos = match db.repositories().find(doc! {}).skip(skip).limit(params.limit).await {
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: repos,
|
||||
total: Some(total),
|
||||
page: Some(params.page),
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn add_repository(
|
||||
Extension(agent): AgentExt,
|
||||
Json(req): Json<AddRepositoryRequest>,
|
||||
) -> Result<Json<ApiResponse<TrackedRepository>>, StatusCode> {
|
||||
let mut repo = TrackedRepository::new(req.name, req.git_url);
|
||||
repo.default_branch = req.default_branch;
|
||||
repo.tracker_type = req.tracker_type;
|
||||
repo.tracker_owner = req.tracker_owner;
|
||||
repo.tracker_repo = req.tracker_repo;
|
||||
repo.scan_schedule = req.scan_schedule;
|
||||
|
||||
agent
|
||||
.db
|
||||
.repositories()
|
||||
.insert_one(&repo)
|
||||
.await
|
||||
.map_err(|_| StatusCode::CONFLICT)?;
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: repo,
|
||||
total: None,
|
||||
page: None,
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn trigger_scan(
|
||||
Extension(agent): AgentExt,
|
||||
Path(id): Path<String>,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let agent_clone = (*agent).clone();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = agent_clone.run_scan(&id, ScanTrigger::Manual).await {
|
||||
tracing::error!("Manual scan failed for {id}: {e}");
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Json(serde_json::json!({ "status": "scan_triggered" })))
|
||||
}
|
||||
|
||||
pub async fn list_findings(
|
||||
Extension(agent): AgentExt,
|
||||
Query(filter): Query<FindingsFilter>,
|
||||
) -> ApiResult<Vec<Finding>> {
|
||||
let db = &agent.db;
|
||||
let mut query = doc! {};
|
||||
if let Some(repo_id) = &filter.repo_id {
|
||||
query.insert("repo_id", repo_id);
|
||||
}
|
||||
if let Some(severity) = &filter.severity {
|
||||
query.insert("severity", severity);
|
||||
}
|
||||
if let Some(scan_type) = &filter.scan_type {
|
||||
query.insert("scan_type", scan_type);
|
||||
}
|
||||
if let Some(status) = &filter.status {
|
||||
query.insert("status", status);
|
||||
}
|
||||
|
||||
let skip = (filter.page.saturating_sub(1)) * filter.limit as u64;
|
||||
let total = db.findings().count_documents(query.clone()).await.unwrap_or(0);
|
||||
|
||||
let findings = match db.findings().find(query).sort(doc! { "created_at": -1 }).skip(skip).limit(filter.limit).await {
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: findings,
|
||||
total: Some(total),
|
||||
page: Some(filter.page),
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn get_finding(
|
||||
Extension(agent): AgentExt,
|
||||
Path(id): Path<String>,
|
||||
) -> Result<Json<ApiResponse<Finding>>, StatusCode> {
|
||||
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
||||
let finding = agent
|
||||
.db
|
||||
.findings()
|
||||
.find_one(doc! { "_id": oid })
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: finding,
|
||||
total: None,
|
||||
page: None,
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn update_finding_status(
|
||||
Extension(agent): AgentExt,
|
||||
Path(id): Path<String>,
|
||||
Json(req): Json<UpdateStatusRequest>,
|
||||
) -> Result<Json<serde_json::Value>, StatusCode> {
|
||||
let oid = mongodb::bson::oid::ObjectId::parse_str(&id).map_err(|_| StatusCode::BAD_REQUEST)?;
|
||||
|
||||
agent
|
||||
.db
|
||||
.findings()
|
||||
.update_one(
|
||||
doc! { "_id": oid },
|
||||
doc! { "$set": { "status": &req.status, "updated_at": mongodb::bson::DateTime::now() } },
|
||||
)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
Ok(Json(serde_json::json!({ "status": "updated" })))
|
||||
}
|
||||
|
||||
pub async fn list_sbom(
|
||||
Extension(agent): AgentExt,
|
||||
Query(params): Query<PaginationParams>,
|
||||
) -> ApiResult<Vec<SbomEntry>> {
|
||||
let db = &agent.db;
|
||||
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
||||
let total = db.sbom_entries().count_documents(doc! {}).await.unwrap_or(0);
|
||||
|
||||
let entries = match db.sbom_entries().find(doc! {}).skip(skip).limit(params.limit).await {
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: entries,
|
||||
total: Some(total),
|
||||
page: Some(params.page),
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn list_issues(
|
||||
Extension(agent): AgentExt,
|
||||
Query(params): Query<PaginationParams>,
|
||||
) -> ApiResult<Vec<TrackerIssue>> {
|
||||
let db = &agent.db;
|
||||
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
||||
let total = db.tracker_issues().count_documents(doc! {}).await.unwrap_or(0);
|
||||
|
||||
let issues = match db.tracker_issues().find(doc! {}).sort(doc! { "created_at": -1 }).skip(skip).limit(params.limit).await {
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: issues,
|
||||
total: Some(total),
|
||||
page: Some(params.page),
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn list_scan_runs(
|
||||
Extension(agent): AgentExt,
|
||||
Query(params): Query<PaginationParams>,
|
||||
) -> ApiResult<Vec<ScanRun>> {
|
||||
let db = &agent.db;
|
||||
let skip = (params.page.saturating_sub(1)) * params.limit as u64;
|
||||
let total = db.scan_runs().count_documents(doc! {}).await.unwrap_or(0);
|
||||
|
||||
let scans = match db.scan_runs().find(doc! {}).sort(doc! { "started_at": -1 }).skip(skip).limit(params.limit).await {
|
||||
Ok(cursor) => collect_cursor_async(cursor).await,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
|
||||
Ok(Json(ApiResponse {
|
||||
data: scans,
|
||||
total: Some(total),
|
||||
page: Some(params.page),
|
||||
}))
|
||||
}
|
||||
|
||||
async fn collect_cursor_async<T: serde::de::DeserializeOwned + Unpin + Send>(
|
||||
mut cursor: mongodb::Cursor<T>,
|
||||
) -> Vec<T> {
|
||||
use futures_util::StreamExt;
|
||||
let mut items = Vec::new();
|
||||
while let Some(Ok(item)) = cursor.next().await {
|
||||
items.push(item);
|
||||
}
|
||||
items
|
||||
}
|
||||
5
compliance-agent/src/api/mod.rs
Normal file
5
compliance-agent/src/api/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod handlers;
|
||||
pub mod routes;
|
||||
pub mod server;
|
||||
|
||||
pub use server::start_api_server;
|
||||
19
compliance-agent/src/api/routes.rs
Normal file
19
compliance-agent/src/api/routes.rs
Normal file
@@ -0,0 +1,19 @@
|
||||
use axum::routing::{get, patch, post};
|
||||
use axum::Router;
|
||||
|
||||
use crate::api::handlers;
|
||||
|
||||
pub fn build_router() -> Router {
|
||||
Router::new()
|
||||
.route("/api/v1/health", get(handlers::health))
|
||||
.route("/api/v1/stats/overview", get(handlers::stats_overview))
|
||||
.route("/api/v1/repositories", get(handlers::list_repositories))
|
||||
.route("/api/v1/repositories", post(handlers::add_repository))
|
||||
.route("/api/v1/repositories/{id}/scan", post(handlers::trigger_scan))
|
||||
.route("/api/v1/findings", get(handlers::list_findings))
|
||||
.route("/api/v1/findings/{id}", get(handlers::get_finding))
|
||||
.route("/api/v1/findings/{id}/status", patch(handlers::update_finding_status))
|
||||
.route("/api/v1/sbom", get(handlers::list_sbom))
|
||||
.route("/api/v1/issues", get(handlers::list_issues))
|
||||
.route("/api/v1/scan-runs", get(handlers::list_scan_runs))
|
||||
}
|
||||
28
compliance-agent/src/api/server.rs
Normal file
28
compliance-agent/src/api/server.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::Extension;
|
||||
use tower_http::cors::CorsLayer;
|
||||
use tower_http::trace::TraceLayer;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
use crate::api::routes;
|
||||
use crate::error::AgentError;
|
||||
|
||||
pub async fn start_api_server(agent: ComplianceAgent, port: u16) -> Result<(), AgentError> {
|
||||
let app = routes::build_router()
|
||||
.layer(Extension(Arc::new(agent)))
|
||||
.layer(CorsLayer::permissive())
|
||||
.layer(TraceLayer::new_for_http());
|
||||
|
||||
let addr = format!("0.0.0.0:{port}");
|
||||
let listener = tokio::net::TcpListener::bind(&addr)
|
||||
.await
|
||||
.map_err(|e| AgentError::Other(format!("Failed to bind to {addr}: {e}")))?;
|
||||
|
||||
tracing::info!("REST API listening on {addr}");
|
||||
axum::serve(listener, app)
|
||||
.await
|
||||
.map_err(|e| AgentError::Other(format!("API server error: {e}")))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
43
compliance-agent/src/config.rs
Normal file
43
compliance-agent/src/config.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use compliance_core::AgentConfig;
|
||||
use secrecy::SecretString;
|
||||
|
||||
use crate::error::AgentError;
|
||||
|
||||
fn env_var(key: &str) -> Result<String, AgentError> {
|
||||
std::env::var(key).map_err(|_| AgentError::Config(format!("Missing env var: {key}")))
|
||||
}
|
||||
|
||||
/// Read an optional environment variable; an unset OR empty value is `None`.
fn env_var_opt(key: &str) -> Option<String> {
    match std::env::var(key) {
        Ok(value) if !value.is_empty() => Some(value),
        _ => None,
    }
}
|
||||
|
||||
fn env_secret_opt(key: &str) -> Option<SecretString> {
|
||||
env_var_opt(key).map(SecretString::from)
|
||||
}
|
||||
|
||||
pub fn load_config() -> Result<AgentConfig, AgentError> {
|
||||
Ok(AgentConfig {
|
||||
mongodb_uri: env_var("MONGODB_URI")?,
|
||||
mongodb_database: env_var_opt("MONGODB_DATABASE").unwrap_or_else(|| "compliance_scanner".to_string()),
|
||||
litellm_url: env_var_opt("LITELLM_URL").unwrap_or_else(|| "http://localhost:4000".to_string()),
|
||||
litellm_api_key: SecretString::from(env_var_opt("LITELLM_API_KEY").unwrap_or_default()),
|
||||
litellm_model: env_var_opt("LITELLM_MODEL").unwrap_or_else(|| "gpt-4o".to_string()),
|
||||
github_token: env_secret_opt("GITHUB_TOKEN"),
|
||||
github_webhook_secret: env_secret_opt("GITHUB_WEBHOOK_SECRET"),
|
||||
gitlab_url: env_var_opt("GITLAB_URL"),
|
||||
gitlab_token: env_secret_opt("GITLAB_TOKEN"),
|
||||
gitlab_webhook_secret: env_secret_opt("GITLAB_WEBHOOK_SECRET"),
|
||||
jira_url: env_var_opt("JIRA_URL"),
|
||||
jira_email: env_var_opt("JIRA_EMAIL"),
|
||||
jira_api_token: env_secret_opt("JIRA_API_TOKEN"),
|
||||
jira_project_key: env_var_opt("JIRA_PROJECT_KEY"),
|
||||
searxng_url: env_var_opt("SEARXNG_URL"),
|
||||
nvd_api_key: env_secret_opt("NVD_API_KEY"),
|
||||
agent_port: env_var_opt("AGENT_PORT")
|
||||
.and_then(|p| p.parse().ok())
|
||||
.unwrap_or(3001),
|
||||
scan_schedule: env_var_opt("SCAN_SCHEDULE").unwrap_or_else(|| "0 0 */6 * * *".to_string()),
|
||||
cve_monitor_schedule: env_var_opt("CVE_MONITOR_SCHEDULE").unwrap_or_else(|| "0 0 0 * * *".to_string()),
|
||||
git_clone_base_path: env_var_opt("GIT_CLONE_BASE_PATH").unwrap_or_else(|| "/tmp/compliance-scanner/repos".to_string()),
|
||||
})
|
||||
}
|
||||
122
compliance-agent/src/database.rs
Normal file
122
compliance-agent/src/database.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
use mongodb::bson::doc;
|
||||
use mongodb::{Client, Collection, IndexModel};
|
||||
use mongodb::options::IndexOptions;
|
||||
|
||||
use compliance_core::models::*;
|
||||
|
||||
use crate::error::AgentError;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Database {
|
||||
inner: mongodb::Database,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn connect(uri: &str, db_name: &str) -> Result<Self, AgentError> {
|
||||
let client = Client::with_uri_str(uri).await?;
|
||||
let db = client.database(db_name);
|
||||
db.run_command(doc! { "ping": 1 }).await?;
|
||||
tracing::info!("Connected to MongoDB database '{db_name}'");
|
||||
Ok(Self { inner: db })
|
||||
}
|
||||
|
||||
pub async fn ensure_indexes(&self) -> Result<(), AgentError> {
|
||||
// repositories: unique git_url
|
||||
self.repositories()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "git_url": 1 })
|
||||
.options(IndexOptions::builder().unique(true).build())
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// findings: unique fingerprint
|
||||
self.findings()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "fingerprint": 1 })
|
||||
.options(IndexOptions::builder().unique(true).build())
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// findings: repo_id + severity compound
|
||||
self.findings()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "repo_id": 1, "severity": 1 })
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// scan_runs: repo_id + started_at descending
|
||||
self.scan_runs()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "repo_id": 1, "started_at": -1 })
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// sbom_entries: compound
|
||||
self.sbom_entries()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "repo_id": 1, "name": 1, "version": 1 })
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// cve_alerts: unique cve_id + repo_id
|
||||
self.cve_alerts()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "cve_id": 1, "repo_id": 1 })
|
||||
.options(IndexOptions::builder().unique(true).build())
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// tracker_issues: unique finding_id
|
||||
self.tracker_issues()
|
||||
.create_index(
|
||||
IndexModel::builder()
|
||||
.keys(doc! { "finding_id": 1 })
|
||||
.options(IndexOptions::builder().unique(true).build())
|
||||
.build(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
tracing::info!("Database indexes ensured");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn repositories(&self) -> Collection<TrackedRepository> {
|
||||
self.inner.collection("repositories")
|
||||
}
|
||||
|
||||
pub fn findings(&self) -> Collection<Finding> {
|
||||
self.inner.collection("findings")
|
||||
}
|
||||
|
||||
pub fn scan_runs(&self) -> Collection<ScanRun> {
|
||||
self.inner.collection("scan_runs")
|
||||
}
|
||||
|
||||
pub fn sbom_entries(&self) -> Collection<SbomEntry> {
|
||||
self.inner.collection("sbom_entries")
|
||||
}
|
||||
|
||||
pub fn cve_alerts(&self) -> Collection<CveAlert> {
|
||||
self.inner.collection("cve_alerts")
|
||||
}
|
||||
|
||||
pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
|
||||
self.inner.collection("tracker_issues")
|
||||
}
|
||||
|
||||
pub fn raw_collection(&self, name: &str) -> Collection<mongodb::bson::Document> {
|
||||
self.inner.collection(name)
|
||||
}
|
||||
}
|
||||
41
compliance-agent/src/error.rs
Normal file
41
compliance-agent/src/error.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use compliance_core::CoreError;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AgentError {
|
||||
#[error(transparent)]
|
||||
Core(#[from] CoreError),
|
||||
|
||||
#[error("Database error: {0}")]
|
||||
Database(#[from] mongodb::error::Error),
|
||||
|
||||
#[error("Git error: {0}")]
|
||||
Git(#[from] git2::Error),
|
||||
|
||||
#[error("HTTP error: {0}")]
|
||||
Http(#[from] reqwest::Error),
|
||||
|
||||
#[error("JSON error: {0}")]
|
||||
Json(#[from] serde_json::Error),
|
||||
|
||||
#[error("IO error: {0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
|
||||
#[error("Scheduler error: {0}")]
|
||||
Scheduler(String),
|
||||
|
||||
#[error("Configuration error: {0}")]
|
||||
Config(String),
|
||||
|
||||
#[error("{0}")]
|
||||
Other(String),
|
||||
}
|
||||
|
||||
impl From<AgentError> for axum::http::StatusCode {
|
||||
fn from(err: AgentError) -> Self {
|
||||
match err {
|
||||
AgentError::Core(CoreError::NotFound(_)) => axum::http::StatusCode::NOT_FOUND,
|
||||
_ => axum::http::StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
}
|
||||
157
compliance-agent/src/llm/client.rs
Normal file
157
compliance-agent/src/llm/client.rs
Normal file
@@ -0,0 +1,157 @@
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::error::AgentError;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LlmClient {
|
||||
base_url: String,
|
||||
api_key: SecretString,
|
||||
model: String,
|
||||
http: reqwest::Client,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ChatMessage {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ChatCompletionRequest {
|
||||
model: String,
|
||||
messages: Vec<ChatMessage>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
temperature: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
max_tokens: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct ChatCompletionResponse {
|
||||
choices: Vec<ChatChoice>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct ChatChoice {
|
||||
message: ChatResponseMessage,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct ChatResponseMessage {
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl LlmClient {
|
||||
pub fn new(base_url: String, api_key: SecretString, model: String) -> Self {
|
||||
Self {
|
||||
base_url,
|
||||
api_key,
|
||||
model,
|
||||
http: reqwest::Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn chat(
|
||||
&self,
|
||||
system_prompt: &str,
|
||||
user_prompt: &str,
|
||||
temperature: Option<f64>,
|
||||
) -> Result<String, AgentError> {
|
||||
let url = format!("{}/v1/chat/completions", self.base_url.trim_end_matches('/'));
|
||||
|
||||
let request_body = ChatCompletionRequest {
|
||||
model: self.model.clone(),
|
||||
messages: vec![
|
||||
ChatMessage {
|
||||
role: "system".to_string(),
|
||||
content: system_prompt.to_string(),
|
||||
},
|
||||
ChatMessage {
|
||||
role: "user".to_string(),
|
||||
content: user_prompt.to_string(),
|
||||
},
|
||||
],
|
||||
temperature,
|
||||
max_tokens: Some(4096),
|
||||
};
|
||||
|
||||
let mut req = self
|
||||
.http
|
||||
.post(&url)
|
||||
.header("content-type", "application/json")
|
||||
.json(&request_body);
|
||||
|
||||
let key = self.api_key.expose_secret();
|
||||
if !key.is_empty() {
|
||||
req = req.header("Authorization", format!("Bearer {key}"));
|
||||
}
|
||||
|
||||
let resp = req.send().await.map_err(|e| {
|
||||
AgentError::Other(format!("LiteLLM request failed: {e}"))
|
||||
})?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(AgentError::Other(format!("LiteLLM returned {status}: {body}")));
|
||||
}
|
||||
|
||||
let body: ChatCompletionResponse = resp.json().await.map_err(|e| {
|
||||
AgentError::Other(format!("Failed to parse LiteLLM response: {e}"))
|
||||
})?;
|
||||
|
||||
body.choices
|
||||
.first()
|
||||
.map(|c| c.message.content.clone())
|
||||
.ok_or_else(|| AgentError::Other("Empty response from LiteLLM".to_string()))
|
||||
}
|
||||
|
||||
pub async fn chat_with_messages(
|
||||
&self,
|
||||
messages: Vec<(String, String)>,
|
||||
temperature: Option<f64>,
|
||||
) -> Result<String, AgentError> {
|
||||
let url = format!("{}/v1/chat/completions", self.base_url.trim_end_matches('/'));
|
||||
|
||||
let request_body = ChatCompletionRequest {
|
||||
model: self.model.clone(),
|
||||
messages: messages
|
||||
.into_iter()
|
||||
.map(|(role, content)| ChatMessage { role, content })
|
||||
.collect(),
|
||||
temperature,
|
||||
max_tokens: Some(4096),
|
||||
};
|
||||
|
||||
let mut req = self
|
||||
.http
|
||||
.post(&url)
|
||||
.header("content-type", "application/json")
|
||||
.json(&request_body);
|
||||
|
||||
let key = self.api_key.expose_secret();
|
||||
if !key.is_empty() {
|
||||
req = req.header("Authorization", format!("Bearer {key}"));
|
||||
}
|
||||
|
||||
let resp = req.send().await.map_err(|e| {
|
||||
AgentError::Other(format!("LiteLLM request failed: {e}"))
|
||||
})?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(AgentError::Other(format!("LiteLLM returned {status}: {body}")));
|
||||
}
|
||||
|
||||
let body: ChatCompletionResponse = resp.json().await.map_err(|e| {
|
||||
AgentError::Other(format!("Failed to parse LiteLLM response: {e}"))
|
||||
})?;
|
||||
|
||||
body.choices
|
||||
.first()
|
||||
.map(|c| c.message.content.clone())
|
||||
.ok_or_else(|| AgentError::Other("Empty response from LiteLLM".to_string()))
|
||||
}
|
||||
}
|
||||
65
compliance-agent/src/llm/descriptions.rs
Normal file
65
compliance-agent/src/llm/descriptions.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::models::Finding;
|
||||
|
||||
use crate::error::AgentError;
|
||||
use crate::llm::LlmClient;
|
||||
|
||||
const DESCRIPTION_SYSTEM_PROMPT: &str = r#"You are a security engineer writing issue descriptions for a bug tracker. Generate a clear, actionable issue body in Markdown format that includes:
|
||||
|
||||
1. **Summary**: 1-2 sentence overview
|
||||
2. **Evidence**: Code location, snippet, and what was detected
|
||||
3. **Impact**: What could happen if not fixed
|
||||
4. **Remediation**: Step-by-step fix instructions
|
||||
5. **References**: Relevant CWE/CVE links if applicable
|
||||
|
||||
Keep it concise and professional. Use code blocks for code snippets."#;
|
||||
|
||||
pub async fn generate_issue_description(
|
||||
llm: &Arc<LlmClient>,
|
||||
finding: &Finding,
|
||||
) -> Result<(String, String), AgentError> {
|
||||
let user_prompt = format!(
|
||||
"Generate an issue title and body for this finding:\n\
|
||||
Scanner: {}\n\
|
||||
Type: {}\n\
|
||||
Severity: {}\n\
|
||||
Rule: {}\n\
|
||||
Title: {}\n\
|
||||
Description: {}\n\
|
||||
File: {}\n\
|
||||
Line: {}\n\
|
||||
Code:\n```\n{}\n```\n\
|
||||
CWE: {}\n\
|
||||
CVE: {}\n\
|
||||
Remediation hint: {}",
|
||||
finding.scanner,
|
||||
finding.scan_type,
|
||||
finding.severity,
|
||||
finding.rule_id.as_deref().unwrap_or("N/A"),
|
||||
finding.title,
|
||||
finding.description,
|
||||
finding.file_path.as_deref().unwrap_or("N/A"),
|
||||
finding.line_number.map(|n| n.to_string()).unwrap_or_else(|| "N/A".to_string()),
|
||||
finding.code_snippet.as_deref().unwrap_or("N/A"),
|
||||
finding.cwe.as_deref().unwrap_or("N/A"),
|
||||
finding.cve.as_deref().unwrap_or("N/A"),
|
||||
finding.remediation.as_deref().unwrap_or("N/A"),
|
||||
);
|
||||
|
||||
let response = llm.chat(DESCRIPTION_SYSTEM_PROMPT, &user_prompt, Some(0.3)).await?;
|
||||
|
||||
// Extract title from first line, rest is body
|
||||
let mut lines = response.lines();
|
||||
let title = lines
|
||||
.next()
|
||||
.unwrap_or(&finding.title)
|
||||
.trim_start_matches('#')
|
||||
.trim()
|
||||
.to_string();
|
||||
let body = lines.collect::<Vec<_>>().join("\n").trim().to_string();
|
||||
|
||||
let body = if body.is_empty() { response } else { body };
|
||||
|
||||
Ok((title, body))
|
||||
}
|
||||
27
compliance-agent/src/llm/fixes.rs
Normal file
27
compliance-agent/src/llm/fixes.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::models::Finding;
|
||||
|
||||
use crate::error::AgentError;
|
||||
use crate::llm::LlmClient;
|
||||
|
||||
const FIX_SYSTEM_PROMPT: &str = r#"You are a security engineer. Given a security finding with code context, suggest a concrete code fix. Return ONLY the fixed code snippet that can directly replace the vulnerable code. Include brief inline comments explaining the fix."#;
|
||||
|
||||
pub async fn suggest_fix(
|
||||
llm: &Arc<LlmClient>,
|
||||
finding: &Finding,
|
||||
) -> Result<String, AgentError> {
|
||||
let user_prompt = format!(
|
||||
"Suggest a fix for this vulnerability:\n\
|
||||
Language context from file: {}\n\
|
||||
Rule: {}\n\
|
||||
Description: {}\n\
|
||||
Vulnerable code:\n```\n{}\n```",
|
||||
finding.file_path.as_deref().unwrap_or("unknown"),
|
||||
finding.rule_id.as_deref().unwrap_or("N/A"),
|
||||
finding.description,
|
||||
finding.code_snippet.as_deref().unwrap_or("N/A"),
|
||||
);
|
||||
|
||||
llm.chat(FIX_SYSTEM_PROMPT, &user_prompt, Some(0.2)).await
|
||||
}
|
||||
10
compliance-agent/src/llm/mod.rs
Normal file
10
compliance-agent/src/llm/mod.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
pub mod client;
|
||||
#[allow(dead_code)]
|
||||
pub mod descriptions;
|
||||
#[allow(dead_code)]
|
||||
pub mod fixes;
|
||||
#[allow(dead_code)]
|
||||
pub mod pr_review;
|
||||
pub mod triage;
|
||||
|
||||
pub use client::LlmClient;
|
||||
77
compliance-agent/src/llm/pr_review.rs
Normal file
77
compliance-agent/src/llm/pr_review.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::models::Finding;
|
||||
use compliance_core::traits::issue_tracker::ReviewComment;
|
||||
|
||||
use crate::error::AgentError;
|
||||
use crate::llm::LlmClient;
|
||||
|
||||
const PR_REVIEW_SYSTEM_PROMPT: &str = r#"You are a security-focused code reviewer. Given a list of security findings in a PR diff, generate concise review comments. Each comment should:
|
||||
1. Briefly explain the issue
|
||||
2. Suggest a specific fix
|
||||
3. Reference the relevant security standard (CWE, OWASP) if applicable
|
||||
|
||||
Be constructive and professional. Return JSON array:
|
||||
[{"path": "file.rs", "line": 42, "body": "..."}]"#;
|
||||
|
||||
pub async fn generate_pr_review(
|
||||
llm: &Arc<LlmClient>,
|
||||
findings: &[Finding],
|
||||
) -> Result<(String, Vec<ReviewComment>), AgentError> {
|
||||
if findings.is_empty() {
|
||||
return Ok(("No security issues found in this PR.".to_string(), Vec::new()));
|
||||
}
|
||||
|
||||
let findings_text: Vec<String> = findings
|
||||
.iter()
|
||||
.map(|f| {
|
||||
format!(
|
||||
"- [{severity}] {title} in {file}:{line}\n Code: {code}\n Rule: {rule}",
|
||||
severity = f.severity,
|
||||
title = f.title,
|
||||
file = f.file_path.as_deref().unwrap_or("unknown"),
|
||||
line = f.line_number.map(|n| n.to_string()).unwrap_or_else(|| "?".to_string()),
|
||||
code = f.code_snippet.as_deref().unwrap_or("N/A"),
|
||||
rule = f.rule_id.as_deref().unwrap_or("N/A"),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let user_prompt = format!(
|
||||
"Generate review comments for these {} findings:\n{}",
|
||||
findings.len(),
|
||||
findings_text.join("\n"),
|
||||
);
|
||||
|
||||
let response = llm.chat(PR_REVIEW_SYSTEM_PROMPT, &user_prompt, Some(0.3)).await?;
|
||||
|
||||
// Parse comments from LLM response
|
||||
let comments: Vec<ReviewComment> = serde_json::from_str::<Vec<PrComment>>(&response)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|c| ReviewComment {
|
||||
path: c.path,
|
||||
line: c.line,
|
||||
body: c.body,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let summary = format!(
|
||||
"## Security Review\n\nFound **{}** potential security issue(s) in this PR.\n\n{}",
|
||||
findings.len(),
|
||||
findings
|
||||
.iter()
|
||||
.map(|f| format!("- **[{}]** {} in `{}`", f.severity, f.title, f.file_path.as_deref().unwrap_or("unknown")))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n"),
|
||||
);
|
||||
|
||||
Ok((summary, comments))
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct PrComment {
|
||||
path: String,
|
||||
line: u32,
|
||||
body: String,
|
||||
}
|
||||
73
compliance-agent/src/llm/triage.rs
Normal file
73
compliance-agent/src/llm/triage.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::models::{Finding, FindingStatus};
|
||||
|
||||
use crate::llm::LlmClient;
|
||||
|
||||
const TRIAGE_SYSTEM_PROMPT: &str = r#"You are a security finding triage expert. Analyze the following security finding and determine:
|
||||
1. Is this a true positive? (yes/no)
|
||||
2. Confidence score (0-10, where 10 is highest confidence this is a real issue)
|
||||
3. Brief remediation suggestion (1-2 sentences)
|
||||
|
||||
Respond in JSON format:
|
||||
{"true_positive": true/false, "confidence": N, "remediation": "..."}"#;
|
||||
|
||||
pub async fn triage_findings(llm: &Arc<LlmClient>, findings: &mut Vec<Finding>) -> usize {
|
||||
let mut passed = 0;
|
||||
|
||||
for finding in findings.iter_mut() {
|
||||
let user_prompt = format!(
|
||||
"Scanner: {}\nRule: {}\nSeverity: {}\nTitle: {}\nDescription: {}\nFile: {}\nLine: {}\nCode: {}",
|
||||
finding.scanner,
|
||||
finding.rule_id.as_deref().unwrap_or("N/A"),
|
||||
finding.severity,
|
||||
finding.title,
|
||||
finding.description,
|
||||
finding.file_path.as_deref().unwrap_or("N/A"),
|
||||
finding.line_number.map(|n| n.to_string()).unwrap_or_else(|| "N/A".to_string()),
|
||||
finding.code_snippet.as_deref().unwrap_or("N/A"),
|
||||
);
|
||||
|
||||
match llm.chat(TRIAGE_SYSTEM_PROMPT, &user_prompt, Some(0.1)).await {
|
||||
Ok(response) => {
|
||||
if let Ok(result) = serde_json::from_str::<TriageResult>(&response) {
|
||||
finding.confidence = Some(result.confidence);
|
||||
if let Some(remediation) = result.remediation {
|
||||
finding.remediation = Some(remediation);
|
||||
}
|
||||
|
||||
if result.confidence >= 3.0 {
|
||||
finding.status = FindingStatus::Triaged;
|
||||
passed += 1;
|
||||
} else {
|
||||
finding.status = FindingStatus::FalsePositive;
|
||||
}
|
||||
} else {
|
||||
// If LLM response doesn't parse, keep the finding
|
||||
finding.status = FindingStatus::Triaged;
|
||||
passed += 1;
|
||||
tracing::warn!("Failed to parse triage response for {}: {response}", finding.fingerprint);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
// On LLM error, keep the finding
|
||||
tracing::warn!("LLM triage failed for {}: {e}", finding.fingerprint);
|
||||
finding.status = FindingStatus::Triaged;
|
||||
passed += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove false positives
|
||||
findings.retain(|f| f.status != FindingStatus::FalsePositive);
|
||||
passed
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct TriageResult {
|
||||
#[serde(default)]
|
||||
true_positive: bool,
|
||||
#[serde(default)]
|
||||
confidence: f64,
|
||||
remediation: Option<String>,
|
||||
}
|
||||
53
compliance-agent/src/main.rs
Normal file
53
compliance-agent/src/main.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use tracing_subscriber::EnvFilter;

mod agent;
mod config;
mod database;
mod error;
mod api;
mod llm;
mod pipeline;
mod scheduler;
#[allow(dead_code)]
mod trackers;
mod webhooks;

/// Agent entry point: initializes logging and config, connects to MongoDB,
/// then runs three concurrent services — the cron scheduler, the webhook
/// server, and the REST API (which runs on the main task).
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Log filter comes from RUST_LOG when set, otherwise defaults to "info".
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")))
        .init();

    // Load a local .env file if present; a missing file is not an error.
    dotenvy::dotenv().ok();

    tracing::info!("Loading configuration...");
    let config = config::load_config()?;

    tracing::info!("Connecting to MongoDB...");
    let db = database::Database::connect(&config.mongodb_uri, &config.mongodb_database).await?;
    db.ensure_indexes().await?;

    let agent = agent::ComplianceAgent::new(config.clone(), db.clone());

    // Scheduler runs on a background task; failures are logged, not fatal.
    tracing::info!("Starting scheduler...");
    let scheduler_agent = agent.clone();
    let scheduler_handle = tokio::spawn(async move {
        if let Err(e) = scheduler::start_scheduler(&scheduler_agent).await {
            tracing::error!("Scheduler error: {e}");
        }
    });

    // Webhook server likewise runs on its own background task.
    tracing::info!("Starting webhook server...");
    let webhook_agent = agent.clone();
    let webhook_handle = tokio::spawn(async move {
        if let Err(e) = webhooks::start_webhook_server(&webhook_agent).await {
            tracing::error!("Webhook server error: {e}");
        }
    });

    // The REST API blocks the main task until it shuts down; only then are
    // the background tasks awaited (their errors were already logged above).
    tracing::info!("Starting REST API on port {}...", config.agent_port);
    api::start_api_server(agent, config.agent_port).await?;

    let _ = tokio::join!(scheduler_handle, webhook_handle);
    Ok(())
}
|
||||
199
compliance-agent/src/pipeline/cve.rs
Normal file
199
compliance-agent/src/pipeline/cve.rs
Normal file
@@ -0,0 +1,199 @@
|
||||
use compliance_core::models::{CveAlert, CveSource, SbomEntry, VulnRef};
|
||||
use compliance_core::CoreError;
|
||||
|
||||
/// Queries public vulnerability services (OSV.dev, NVD, SearXNG) for
/// dependency CVE information.
pub struct CveScanner {
    // Shared HTTP client (connection pooling across requests).
    http: reqwest::Client,
    // Optional SearXNG base URL used for web-context search.
    searxng_url: Option<String>,
    // Optional NVD API key; raises NVD rate limits when present.
    nvd_api_key: Option<String>,
}
|
||||
|
||||
impl CveScanner {
|
||||
pub fn new(http: reqwest::Client, searxng_url: Option<String>, nvd_api_key: Option<String>) -> Self {
|
||||
Self { http, searxng_url, nvd_api_key }
|
||||
}
|
||||
|
||||
pub async fn scan_dependencies(
|
||||
&self,
|
||||
repo_id: &str,
|
||||
entries: &mut [SbomEntry],
|
||||
) -> Result<Vec<CveAlert>, CoreError> {
|
||||
let mut alerts = Vec::new();
|
||||
|
||||
// Batch query OSV.dev
|
||||
let osv_results = self.query_osv_batch(entries).await?;
|
||||
for (idx, vulns) in osv_results.into_iter().enumerate() {
|
||||
if let Some(entry) = entries.get_mut(idx) {
|
||||
for vuln in &vulns {
|
||||
entry.known_vulnerabilities.push(VulnRef {
|
||||
id: vuln.id.clone(),
|
||||
source: "osv".to_string(),
|
||||
severity: vuln.severity.clone(),
|
||||
url: Some(format!("https://osv.dev/vulnerability/{}", vuln.id)),
|
||||
});
|
||||
|
||||
let mut alert = CveAlert::new(
|
||||
vuln.id.clone(),
|
||||
repo_id.to_string(),
|
||||
entry.name.clone(),
|
||||
entry.version.clone(),
|
||||
CveSource::Osv,
|
||||
);
|
||||
alert.summary = vuln.summary.clone();
|
||||
alerts.push(alert);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Enrich with NVD CVSS scores
|
||||
for alert in &mut alerts {
|
||||
if let Ok(Some(cvss)) = self.query_nvd(&alert.cve_id).await {
|
||||
alert.cvss_score = Some(cvss);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(alerts)
|
||||
}
|
||||
|
||||
async fn query_osv_batch(&self, entries: &[SbomEntry]) -> Result<Vec<Vec<OsvVuln>>, CoreError> {
|
||||
let queries: Vec<_> = entries
|
||||
.iter()
|
||||
.filter_map(|e| {
|
||||
e.purl.as_ref().map(|purl| {
|
||||
serde_json::json!({
|
||||
"package": { "purl": purl }
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if queries.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let body = serde_json::json!({ "queries": queries });
|
||||
|
||||
let resp = self
|
||||
.http
|
||||
.post("https://api.osv.dev/v1/querybatch")
|
||||
.json(&body)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::Http(format!("OSV.dev request failed: {e}")))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
tracing::warn!("OSV.dev returned {status}: {body}");
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let result: OsvBatchResponse = resp.json().await.map_err(|e| {
|
||||
CoreError::Http(format!("Failed to parse OSV.dev response: {e}"))
|
||||
})?;
|
||||
|
||||
let vulns = result
|
||||
.results
|
||||
.into_iter()
|
||||
.map(|r| {
|
||||
r.vulns
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|v| OsvVuln {
|
||||
id: v.id,
|
||||
summary: v.summary,
|
||||
severity: v.database_specific
|
||||
.and_then(|d| d.get("severity").and_then(|s| s.as_str()).map(String::from)),
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(vulns)
|
||||
}
|
||||
|
||||
async fn query_nvd(&self, cve_id: &str) -> Result<Option<f64>, CoreError> {
|
||||
if !cve_id.starts_with("CVE-") {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let url = format!("https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={cve_id}");
|
||||
let mut req = self.http.get(&url);
|
||||
|
||||
if let Some(key) = &self.nvd_api_key {
|
||||
req = req.header("apiKey", key.as_str());
|
||||
}
|
||||
|
||||
let resp = req.send().await.map_err(|e| {
|
||||
CoreError::Http(format!("NVD request failed: {e}"))
|
||||
})?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let body: serde_json::Value = resp.json().await.map_err(|e| {
|
||||
CoreError::Http(format!("Failed to parse NVD response: {e}"))
|
||||
})?;
|
||||
|
||||
// Extract CVSS v3.1 base score
|
||||
let score = body["vulnerabilities"]
|
||||
.as_array()
|
||||
.and_then(|v| v.first())
|
||||
.and_then(|v| v["cve"]["metrics"]["cvssMetricV31"].as_array())
|
||||
.and_then(|m| m.first())
|
||||
.and_then(|m| m["cvssData"]["baseScore"].as_f64());
|
||||
|
||||
Ok(score)
|
||||
}
|
||||
|
||||
pub async fn search_context(&self, cve_id: &str) -> Result<Vec<String>, CoreError> {
|
||||
let Some(searxng_url) = &self.searxng_url else {
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
let url = format!("{}/search?q={cve_id}&format=json&engines=duckduckgo", searxng_url.trim_end_matches('/'));
|
||||
let resp = self.http.get(&url).send().await.map_err(|e| {
|
||||
CoreError::Http(format!("SearXNG request failed: {e}"))
|
||||
})?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let body: serde_json::Value = resp.json().await.unwrap_or_default();
|
||||
let results = body["results"]
|
||||
.as_array()
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.take(5)
|
||||
.filter_map(|r| r["url"].as_str().map(String::from))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
}
|
||||
|
||||
// Wire types for the OSV.dev /v1/querybatch response.
#[derive(serde::Deserialize)]
struct OsvBatchResponse {
    // One result per query, in request order.
    results: Vec<OsvBatchResult>,
}

#[derive(serde::Deserialize)]
struct OsvBatchResult {
    // Absent when the queried package has no known vulnerabilities.
    vulns: Option<Vec<OsvVulnEntry>>,
}

#[derive(serde::Deserialize)]
struct OsvVulnEntry {
    id: String,
    summary: Option<String>,
    // Free-form per-database metadata; a "severity" string is read out of it
    // when present.
    database_specific: Option<serde_json::Value>,
}

// Internal, simplified view of one OSV vulnerability.
struct OsvVuln {
    id: String,
    summary: Option<String>,
    severity: Option<String>,
}
|
||||
10
compliance-agent/src/pipeline/dedup.rs
Normal file
10
compliance-agent/src/pipeline/dedup.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
pub fn compute_fingerprint(parts: &[&str]) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
for part in parts {
|
||||
hasher.update(part.as_bytes());
|
||||
hasher.update(b"|");
|
||||
}
|
||||
hex::encode(hasher.finalize())
|
||||
}
|
||||
100
compliance-agent/src/pipeline/git.rs
Normal file
100
compliance-agent/src/pipeline/git.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use git2::{FetchOptions, Repository};
|
||||
|
||||
use crate::error::AgentError;
|
||||
|
||||
/// Local-filesystem git operations for tracked repositories.
pub struct GitOps {
    // Root directory under which each repository is cloned (one subdir per repo).
    base_path: PathBuf,
}

impl GitOps {
    /// Create a `GitOps` rooted at `base_path`.
    pub fn new(base_path: &str) -> Self {
        Self {
            base_path: PathBuf::from(base_path),
        }
    }

    /// Ensure a local working copy exists for `git_url` under
    /// `base_path/repo_name`: fetch if already cloned, clone otherwise.
    /// Returns the path to the working copy.
    pub fn clone_or_fetch(&self, git_url: &str, repo_name: &str) -> Result<PathBuf, AgentError> {
        let repo_path = self.base_path.join(repo_name);

        if repo_path.exists() {
            self.fetch(&repo_path)?;
        } else {
            std::fs::create_dir_all(&repo_path)?;
            Repository::clone(git_url, &repo_path)?;
            tracing::info!("Cloned {git_url} to {}", repo_path.display());
        }

        Ok(repo_path)
    }

    /// Fetch from "origin" and move the current branch to FETCH_HEAD.
    ///
    /// NOTE(review): the reference update below passes `force = true` and does
    /// not verify that FETCH_HEAD descends from the local HEAD — this is a
    /// forced reset rather than a true fast-forward, and would discard local
    /// commits. Confirm that is intended for these scan-only clones.
    fn fetch(&self, repo_path: &Path) -> Result<(), AgentError> {
        let repo = Repository::open(repo_path)?;
        let mut remote = repo.find_remote("origin")?;
        let mut fetch_opts = FetchOptions::new();
        // Empty refspec list: fetch the remote's configured default refspecs.
        remote.fetch(&[] as &[&str], Some(&mut fetch_opts), None)?;

        // Fast-forward to origin/HEAD
        let fetch_head = repo.find_reference("FETCH_HEAD")?;
        let fetch_commit = repo.reference_to_annotated_commit(&fetch_head)?;
        let head_ref = repo.head()?;
        let head_name = head_ref.name().unwrap_or("HEAD");

        // Point the current branch at the fetched commit (forced update).
        repo.reference(
            head_name,
            fetch_commit.id(),
            true,
            "fast-forward",
        )?;
        // Force-checkout so the work tree matches the new HEAD.
        repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;

        tracing::info!("Fetched and fast-forwarded {}", repo_path.display());
        Ok(())
    }

    /// Return the full hex SHA of the commit HEAD currently points to.
    pub fn get_head_sha(repo_path: &Path) -> Result<String, AgentError> {
        let repo = Repository::open(repo_path)?;
        let head = repo.head()?;
        let commit = head.peel_to_commit()?;
        Ok(commit.id().to_string())
    }

    /// True when HEAD differs from `last_sha`, or when there is no prior SHA.
    pub fn has_new_commits(repo_path: &Path, last_sha: Option<&str>) -> Result<bool, AgentError> {
        let current_sha = Self::get_head_sha(repo_path)?;
        match last_sha {
            Some(sha) if sha == current_sha => Ok(false),
            _ => Ok(true),
        }
    }

    /// List paths changed between two commits (diff of their trees).
    /// Paths come from the new side of each delta, relative to the repo root.
    pub fn get_changed_files(
        repo_path: &Path,
        old_sha: &str,
        new_sha: &str,
    ) -> Result<Vec<String>, AgentError> {
        let repo = Repository::open(repo_path)?;
        let old_commit = repo.find_commit(git2::Oid::from_str(old_sha)?)?;
        let new_commit = repo.find_commit(git2::Oid::from_str(new_sha)?)?;

        let old_tree = old_commit.tree()?;
        let new_tree = new_commit.tree()?;

        let diff = repo.diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None)?;

        let mut files = Vec::new();
        diff.foreach(
            &mut |delta, _| {
                if let Some(path) = delta.new_file().path() {
                    files.push(path.to_string_lossy().to_string());
                }
                true
            },
            None,
            None,
            None,
        )?;

        Ok(files)
    }
}
|
||||
7
compliance-agent/src/pipeline/mod.rs
Normal file
7
compliance-agent/src/pipeline/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod cve;
|
||||
pub mod dedup;
|
||||
pub mod git;
|
||||
pub mod orchestrator;
|
||||
pub mod patterns;
|
||||
pub mod sbom;
|
||||
pub mod semgrep;
|
||||
252
compliance-agent/src/pipeline/orchestrator.rs
Normal file
252
compliance-agent/src/pipeline/orchestrator.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use mongodb::bson::doc;
|
||||
|
||||
use compliance_core::models::*;
|
||||
use compliance_core::traits::Scanner;
|
||||
use compliance_core::AgentConfig;
|
||||
|
||||
use crate::database::Database;
|
||||
use crate::error::AgentError;
|
||||
use crate::llm::LlmClient;
|
||||
use crate::pipeline::cve::CveScanner;
|
||||
use crate::pipeline::git::GitOps;
|
||||
use crate::pipeline::patterns::{GdprPatternScanner, OAuthPatternScanner};
|
||||
use crate::pipeline::sbom::SbomScanner;
|
||||
use crate::pipeline::semgrep::SemgrepScanner;
|
||||
|
||||
/// Runs the full scan pipeline for one repository: clone/fetch, SAST, SBOM,
/// CVE lookup, pattern scanning, LLM triage, and persistence of results.
pub struct PipelineOrchestrator {
    config: AgentConfig,
    db: Database,
    // Shared LLM client used by the triage stage.
    llm: Arc<LlmClient>,
    // Shared HTTP client handed to network-facing scanners.
    http: reqwest::Client,
}
|
||||
|
||||
impl PipelineOrchestrator {
    /// Assemble an orchestrator from shared service handles.
    pub fn new(
        config: AgentConfig,
        db: Database,
        llm: Arc<LlmClient>,
        http: reqwest::Client,
    ) -> Self {
        Self { config, db, llm, http }
    }

    /// Run a scan for the repository with id `repo_id` (a Mongo ObjectId hex
    /// string), recording a `ScanRun` document that tracks progress and is
    /// marked "completed" or "failed" when the pipeline finishes.
    pub async fn run(
        &self,
        repo_id: &str,
        trigger: ScanTrigger,
    ) -> Result<(), AgentError> {
        // Look up the repository
        let repo = self
            .db
            .repositories()
            .find_one(doc! { "_id": mongodb::bson::oid::ObjectId::parse_str(repo_id).map_err(|e| AgentError::Other(e.to_string()))? })
            .await?
            .ok_or_else(|| AgentError::Other(format!("Repository {repo_id} not found")))?;

        // Create scan run
        let scan_run = ScanRun::new(repo_id.to_string(), trigger);
        let insert = self.db.scan_runs().insert_one(&scan_run).await?;
        // NOTE(review): a non-ObjectId inserted id yields an empty string here,
        // which later makes `update_phase` silently no-op — confirm acceptable.
        let scan_run_id = insert.inserted_id.as_object_id()
            .map(|id| id.to_hex())
            .unwrap_or_default();

        let result = self.run_pipeline(&repo, &scan_run_id).await;

        // Update scan run status
        match &result {
            Ok(count) => {
                self.db.scan_runs().update_one(
                    doc! { "_id": &insert.inserted_id },
                    doc! {
                        "$set": {
                            "status": "completed",
                            "current_phase": "completed",
                            "new_findings_count": *count as i64,
                            "completed_at": mongodb::bson::DateTime::now(),
                        }
                    },
                ).await?;
            }
            Err(e) => {
                self.db.scan_runs().update_one(
                    doc! { "_id": &insert.inserted_id },
                    doc! {
                        "$set": {
                            "status": "failed",
                            "error_message": e.to_string(),
                            "completed_at": mongodb::bson::DateTime::now(),
                        }
                    },
                ).await?;
            }
        }

        result.map(|_| ())
    }

    /// Execute every pipeline stage against a freshly updated working copy.
    ///
    /// Stages 1-4 are best-effort (a scanner failure is logged and the
    /// pipeline continues); triage, persistence, and the final repository
    /// update are not. Returns the number of newly inserted findings.
    async fn run_pipeline(
        &self,
        repo: &TrackedRepository,
        scan_run_id: &str,
    ) -> Result<u32, AgentError> {
        let repo_id = repo.id.as_ref()
            .map(|id| id.to_hex())
            .unwrap_or_default();

        // Stage 0: Change detection
        tracing::info!("[{repo_id}] Stage 0: Change detection");
        let git_ops = GitOps::new(&self.config.git_clone_base_path);
        let repo_path = git_ops.clone_or_fetch(&repo.git_url, &repo.name)?;

        // Skip entirely when HEAD matches the last scanned commit.
        if !GitOps::has_new_commits(&repo_path, repo.last_scanned_commit.as_deref())? {
            tracing::info!("[{repo_id}] No new commits, skipping scan");
            return Ok(0);
        }

        let current_sha = GitOps::get_head_sha(&repo_path)?;
        let mut all_findings: Vec<Finding> = Vec::new();

        // Stage 1: Semgrep SAST
        tracing::info!("[{repo_id}] Stage 1: Semgrep SAST");
        self.update_phase(scan_run_id, "sast").await;
        let semgrep = SemgrepScanner;
        match semgrep.scan(&repo_path, &repo_id).await {
            Ok(output) => all_findings.extend(output.findings),
            Err(e) => tracing::warn!("[{repo_id}] Semgrep failed: {e}"),
        }

        // Stage 2: SBOM Generation
        tracing::info!("[{repo_id}] Stage 2: SBOM Generation");
        self.update_phase(scan_run_id, "sbom_generation").await;
        let sbom_scanner = SbomScanner;
        let mut sbom_entries = match sbom_scanner.scan(&repo_path, &repo_id).await {
            Ok(output) => output.sbom_entries,
            Err(e) => {
                tracing::warn!("[{repo_id}] SBOM generation failed: {e}");
                Vec::new()
            }
        };

        // Stage 3: CVE Scanning
        tracing::info!("[{repo_id}] Stage 3: CVE Scanning");
        self.update_phase(scan_run_id, "cve_scanning").await;
        let cve_scanner = CveScanner::new(
            self.http.clone(),
            self.config.searxng_url.clone(),
            // The NVD key is held as a secret; expose it only at the call site.
            self.config.nvd_api_key.as_ref().map(|k| {
                use secrecy::ExposeSecret;
                k.expose_secret().to_string()
            }),
        );
        let cve_alerts = match cve_scanner.scan_dependencies(&repo_id, &mut sbom_entries).await {
            Ok(alerts) => alerts,
            Err(e) => {
                tracing::warn!("[{repo_id}] CVE scanning failed: {e}");
                Vec::new()
            }
        };

        // Stage 4: Pattern Scanning (GDPR + OAuth)
        tracing::info!("[{repo_id}] Stage 4: Pattern Scanning");
        self.update_phase(scan_run_id, "pattern_scanning").await;
        let gdpr = GdprPatternScanner::new();
        match gdpr.scan(&repo_path, &repo_id).await {
            Ok(output) => all_findings.extend(output.findings),
            Err(e) => tracing::warn!("[{repo_id}] GDPR pattern scan failed: {e}"),
        }
        let oauth = OAuthPatternScanner::new();
        match oauth.scan(&repo_path, &repo_id).await {
            Ok(output) => all_findings.extend(output.findings),
            Err(e) => tracing::warn!("[{repo_id}] OAuth pattern scan failed: {e}"),
        }

        // Stage 5: LLM Triage
        tracing::info!("[{repo_id}] Stage 5: LLM Triage ({} findings)", all_findings.len());
        self.update_phase(scan_run_id, "llm_triage").await;
        let triaged = crate::llm::triage::triage_findings(&self.llm, &mut all_findings).await;
        tracing::info!("[{repo_id}] Triaged: {triaged} findings passed confidence threshold");

        // Dedup against existing findings and insert new ones
        let mut new_count = 0u32;
        for mut finding in all_findings {
            finding.scan_run_id = Some(scan_run_id.to_string());
            // Check if fingerprint already exists
            let existing = self
                .db
                .findings()
                .find_one(doc! { "fingerprint": &finding.fingerprint })
                .await?;
            if existing.is_none() {
                self.db.findings().insert_one(&finding).await?;
                new_count += 1;
            }
        }

        // Persist SBOM entries (upsert by repo_id + name + version)
        for entry in &sbom_entries {
            let filter = doc! {
                "repo_id": &entry.repo_id,
                "name": &entry.name,
                "version": &entry.version,
            };
            // NOTE(review): if serialization fails this falls back to an empty
            // update document, which MongoDB rejects (no atomic operators) —
            // the error would surface from `update_one` below. Confirm whether
            // skipping the entry instead was intended.
            let update = mongodb::bson::to_document(entry)
                .map(|d| doc! { "$set": d })
                .unwrap_or_else(|_| doc! {});
            self.db
                .sbom_entries()
                .update_one(filter, update)
                .upsert(true)
                .await?;
        }

        // Persist CVE alerts (upsert by cve_id + repo_id)
        for alert in &cve_alerts {
            let filter = doc! {
                "cve_id": &alert.cve_id,
                "repo_id": &alert.repo_id,
            };
            // NOTE(review): same empty-update fallback caveat as above.
            let update = mongodb::bson::to_document(alert)
                .map(|d| doc! { "$set": d })
                .unwrap_or_else(|_| doc! {});
            self.db
                .cve_alerts()
                .update_one(filter, update)
                .upsert(true)
                .await?;
        }

        // Stage 6: Issue Creation
        tracing::info!("[{repo_id}] Stage 6: Issue Creation");
        self.update_phase(scan_run_id, "issue_creation").await;
        // Issue creation is handled by the trackers module - deferred to agent

        // Stage 7: Update repository
        self.db.repositories().update_one(
            doc! { "_id": repo.id },
            doc! {
                "$set": {
                    "last_scanned_commit": &current_sha,
                    "updated_at": mongodb::bson::DateTime::now(),
                },
                "$inc": { "findings_count": new_count as i64 },
            },
        ).await?;

        tracing::info!("[{repo_id}] Scan complete: {new_count} new findings");
        Ok(new_count)
    }

    /// Best-effort phase bookkeeping on the scan-run document; update failures
    /// (and unparsable ids) are deliberately ignored so progress tracking can
    /// never fail a scan.
    async fn update_phase(&self, scan_run_id: &str, phase: &str) {
        if let Ok(oid) = mongodb::bson::oid::ObjectId::parse_str(scan_run_id) {
            let _ = self.db.scan_runs().update_one(
                doc! { "_id": oid },
                doc! {
                    "$set": { "current_phase": phase },
                    "$push": { "phases_completed": phase },
                },
            ).await;
        }
    }
}
|
||||
226
compliance-agent/src/pipeline/patterns.rs
Normal file
226
compliance-agent/src/pipeline/patterns.rs
Normal file
@@ -0,0 +1,226 @@
|
||||
use std::path::Path;
|
||||
|
||||
use compliance_core::models::{Finding, ScanType, Severity};
|
||||
use compliance_core::traits::{ScanOutput, Scanner};
|
||||
use compliance_core::CoreError;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::pipeline::dedup;
|
||||
|
||||
pub struct GdprPatternScanner {
|
||||
patterns: Vec<PatternRule>,
|
||||
}
|
||||
|
||||
pub struct OAuthPatternScanner {
|
||||
patterns: Vec<PatternRule>,
|
||||
}
|
||||
|
||||
struct PatternRule {
|
||||
id: String,
|
||||
title: String,
|
||||
description: String,
|
||||
pattern: Regex,
|
||||
severity: Severity,
|
||||
file_extensions: Vec<String>,
|
||||
}
|
||||
|
||||
impl GdprPatternScanner {
|
||||
pub fn new() -> Self {
|
||||
let patterns = vec![
|
||||
PatternRule {
|
||||
id: "gdpr-pii-logging".to_string(),
|
||||
title: "PII data potentially logged".to_string(),
|
||||
description: "Logging statements that may contain personally identifiable information (email, SSN, phone, IP address).".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(log|print|console\.|logger\.|tracing::)\s*[\.(].*\b(email|ssn|social.?security|phone.?number|ip.?addr|passport|date.?of.?birth|credit.?card)\b"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::High,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go", "rb"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "gdpr-no-consent".to_string(),
|
||||
title: "Data collection without apparent consent mechanism".to_string(),
|
||||
description: "Data collection endpoint that doesn't reference consent or opt-in mechanisms.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(collect|store|save|persist|record).*\b(personal|user.?data|pii|biometric)\b"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::Medium,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "gdpr-no-delete-endpoint".to_string(),
|
||||
title: "Missing data deletion capability".to_string(),
|
||||
description: "User data models or controllers without corresponding deletion endpoints (right to erasure).".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(class|struct|model)\s+User(?!.*[Dd]elete)"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::Medium,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go", "rb"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "gdpr-hardcoded-retention".to_string(),
|
||||
title: "Hardcoded data retention period".to_string(),
|
||||
description: "Data retention periods should be configurable for GDPR compliance.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(retention|ttl|expire|keep.?for)\s*[=:]\s*\d+"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::Low,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go", "yaml", "yml", "toml", "json"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
];
|
||||
Self { patterns }
|
||||
}
|
||||
}
|
||||
|
||||
impl Scanner for GdprPatternScanner {
|
||||
fn name(&self) -> &str {
|
||||
"gdpr-patterns"
|
||||
}
|
||||
|
||||
fn scan_type(&self) -> ScanType {
|
||||
ScanType::Gdpr
|
||||
}
|
||||
|
||||
async fn scan(&self, repo_path: &Path, repo_id: &str) -> Result<ScanOutput, CoreError> {
|
||||
let findings = scan_with_patterns(repo_path, repo_id, &self.patterns, ScanType::Gdpr, "gdpr-patterns")?;
|
||||
Ok(ScanOutput {
|
||||
findings,
|
||||
sbom_entries: Vec::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl OAuthPatternScanner {
|
||||
pub fn new() -> Self {
|
||||
let patterns = vec![
|
||||
PatternRule {
|
||||
id: "oauth-implicit-grant".to_string(),
|
||||
title: "OAuth implicit grant flow detected".to_string(),
|
||||
description: "Implicit grant flow is deprecated and insecure. Use authorization code flow with PKCE instead.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(response_type\s*[=:]\s*["']?token|grant_type\s*[=:]\s*["']?implicit)"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::High,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go", "yaml", "yml", "json"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "oauth-missing-pkce".to_string(),
|
||||
title: "OAuth flow without PKCE".to_string(),
|
||||
description: "Authorization code flow should use PKCE (code_challenge/code_verifier) for public clients.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)authorization.?code(?!.*code.?challenge)(?!.*pkce)"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::Medium,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "oauth-token-localstorage".to_string(),
|
||||
title: "Token stored in localStorage".to_string(),
|
||||
description: "Storing tokens in localStorage is vulnerable to XSS. Use httpOnly cookies or secure session storage.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)localStorage\.(set|get)Item\s*\(\s*["'].*token"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::High,
|
||||
file_extensions: vec!["js", "ts", "jsx", "tsx"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
PatternRule {
|
||||
id: "oauth-token-url".to_string(),
|
||||
title: "Token passed in URL parameters".to_string(),
|
||||
description: "Tokens in URLs can leak via referrer headers, server logs, and browser history.".to_string(),
|
||||
pattern: Regex::new(r#"(?i)(access_token|bearer)\s*[=]\s*.*\b(url|query|param|href)\b"#).unwrap_or_else(|_| Regex::new("^$").unwrap()),
|
||||
severity: Severity::High,
|
||||
file_extensions: vec!["rs", "py", "js", "ts", "java", "go"].into_iter().map(String::from).collect(),
|
||||
},
|
||||
];
|
||||
Self { patterns }
|
||||
}
|
||||
}
|
||||
|
||||
impl Scanner for OAuthPatternScanner {
|
||||
fn name(&self) -> &str {
|
||||
"oauth-patterns"
|
||||
}
|
||||
|
||||
fn scan_type(&self) -> ScanType {
|
||||
ScanType::OAuth
|
||||
}
|
||||
|
||||
async fn scan(&self, repo_path: &Path, repo_id: &str) -> Result<ScanOutput, CoreError> {
|
||||
let findings = scan_with_patterns(repo_path, repo_id, &self.patterns, ScanType::OAuth, "oauth-patterns")?;
|
||||
Ok(ScanOutput {
|
||||
findings,
|
||||
sbom_entries: Vec::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn scan_with_patterns(
|
||||
repo_path: &Path,
|
||||
repo_id: &str,
|
||||
patterns: &[PatternRule],
|
||||
scan_type: ScanType,
|
||||
scanner_name: &str,
|
||||
) -> Result<Vec<Finding>, CoreError> {
|
||||
let mut findings = Vec::new();
|
||||
|
||||
for entry in walkdir(repo_path)? {
|
||||
let path = entry.path();
|
||||
if !path.is_file() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let ext = path
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let content = match std::fs::read_to_string(path) {
|
||||
Ok(c) => c,
|
||||
Err(_) => continue, // skip binary files
|
||||
};
|
||||
|
||||
let relative_path = path
|
||||
.strip_prefix(repo_path)
|
||||
.unwrap_or(path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
for pattern in patterns {
|
||||
if !pattern.file_extensions.contains(&ext) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (line_num, line) in content.lines().enumerate() {
|
||||
if pattern.pattern.is_match(line) {
|
||||
let fingerprint = dedup::compute_fingerprint(&[
|
||||
repo_id,
|
||||
&pattern.id,
|
||||
&relative_path,
|
||||
&(line_num + 1).to_string(),
|
||||
]);
|
||||
|
||||
let mut finding = Finding::new(
|
||||
repo_id.to_string(),
|
||||
fingerprint,
|
||||
scanner_name.to_string(),
|
||||
scan_type.clone(),
|
||||
pattern.title.clone(),
|
||||
pattern.description.clone(),
|
||||
pattern.severity.clone(),
|
||||
);
|
||||
finding.rule_id = Some(pattern.id.clone());
|
||||
finding.file_path = Some(relative_path.clone());
|
||||
finding.line_number = Some((line_num + 1) as u32);
|
||||
finding.code_snippet = Some(line.to_string());
|
||||
|
||||
findings.push(finding);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(findings)
|
||||
}
|
||||
|
||||
/// Recursively list directory entries under `path`, pruning hidden VCS,
/// dependency, and build-output directories. Unreadable entries are
/// silently dropped.
fn walkdir(path: &Path) -> Result<Vec<walkdir::DirEntry>, CoreError> {
    const SKIP_DIRS: [&str; 8] =
        [".git", "node_modules", "target", "vendor", ".venv", "__pycache__", "dist", "build"];

    let collected: Vec<walkdir::DirEntry> = walkdir::WalkDir::new(path)
        .into_iter()
        .filter_entry(|entry| {
            let file_name = entry.file_name().to_string_lossy();
            SKIP_DIRS.iter().all(|skip| file_name != *skip)
        })
        .filter_map(Result::ok)
        .collect();

    Ok(collected)
}
|
||||
186
compliance-agent/src/pipeline/sbom.rs
Normal file
186
compliance-agent/src/pipeline/sbom.rs
Normal file
@@ -0,0 +1,186 @@
|
||||
use std::path::Path;
|
||||
|
||||
use compliance_core::models::{SbomEntry, ScanType, VulnRef};
|
||||
use compliance_core::traits::{ScanOutput, Scanner};
|
||||
use compliance_core::CoreError;
|
||||
|
||||
/// Generates an SBOM by shelling out to `syft`, then augments entries with
/// `cargo audit` vulnerability data for Rust repositories.
pub struct SbomScanner;

impl Scanner for SbomScanner {
    /// Scanner identifier used in logs and findings.
    fn name(&self) -> &str {
        "sbom"
    }

    fn scan_type(&self) -> ScanType {
        ScanType::Sbom
    }

    /// Produce SBOM entries for the repository.
    ///
    /// Both external tools are best-effort: a failure of either is logged and
    /// scanning continues, so the result may be partial. This scanner never
    /// emits findings — only `sbom_entries`.
    async fn scan(&self, repo_path: &Path, repo_id: &str) -> Result<ScanOutput, CoreError> {
        let mut entries = Vec::new();

        // Run syft for SBOM generation
        match run_syft(repo_path, repo_id).await {
            Ok(syft_entries) => entries.extend(syft_entries),
            Err(e) => tracing::warn!("syft failed: {e}"),
        }

        // Run cargo-audit for Rust-specific vulns
        match run_cargo_audit(repo_path, repo_id).await {
            Ok(vulns) => merge_audit_vulns(&mut entries, vulns),
            Err(e) => tracing::warn!("cargo-audit skipped: {e}"),
        }

        Ok(ScanOutput {
            findings: Vec::new(),
            sbom_entries: entries,
        })
    }
}
|
||||
|
||||
/// Invoke the `syft` CLI against the repository and convert its CycloneDX
/// JSON output into `SbomEntry` records.
///
/// Errors when the binary cannot be spawned, exits non-zero, or its output
/// fails to parse. Missing component versions/types fall back to "unknown"
/// and "library"; the first license of each component is used, preferring
/// its SPDX id over its display name.
async fn run_syft(repo_path: &Path, repo_id: &str) -> Result<Vec<SbomEntry>, CoreError> {
    let output = tokio::process::Command::new("syft")
        .arg(repo_path)
        .args(["-o", "cyclonedx-json"])
        .output()
        .await
        .map_err(|e| CoreError::Scanner {
            scanner: "syft".to_string(),
            source: Box::new(e),
        })?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(CoreError::Scanner {
            scanner: "syft".to_string(),
            source: format!("syft exited with {}: {stderr}", output.status).into(),
        });
    }

    let cdx: CycloneDxBom = serde_json::from_slice(&output.stdout)?;
    let entries = cdx
        .components
        .unwrap_or_default()
        .into_iter()
        .map(|c| {
            let mut entry = SbomEntry::new(
                repo_id.to_string(),
                c.name,
                c.version.unwrap_or_else(|| "unknown".to_string()),
                c.component_type.unwrap_or_else(|| "library".to_string()),
            );
            entry.purl = c.purl;
            // Prefer the SPDX license id; fall back to the license name.
            entry.license = c.licenses.and_then(|ls| {
                ls.first().and_then(|l| {
                    l.license.as_ref().map(|lic| {
                        lic.id.clone().unwrap_or_else(|| lic.name.clone().unwrap_or_default())
                    })
                })
            });
            entry
        })
        .collect();

    Ok(entries)
}
|
||||
|
||||
/// Run `cargo audit --json` in the repository and collect its advisories.
///
/// Returns an empty list when the repo has no `Cargo.lock` (nothing to
/// audit). Errors only when the process cannot be spawned.
///
/// NOTE(review): the exit status is not checked and a parse failure
/// degrades to an empty result — cargo-audit exits non-zero when vulns are
/// found, so this looks like deliberate best-effort behavior, but it also
/// hides a missing/broken cargo-audit install. Confirm intended.
async fn run_cargo_audit(repo_path: &Path, _repo_id: &str) -> Result<Vec<AuditVuln>, CoreError> {
    let cargo_lock = repo_path.join("Cargo.lock");
    if !cargo_lock.exists() {
        return Ok(Vec::new());
    }

    let output = tokio::process::Command::new("cargo")
        .args(["audit", "--json"])
        .current_dir(repo_path)
        .output()
        .await
        .map_err(|e| CoreError::Scanner {
            scanner: "cargo-audit".to_string(),
            source: Box::new(e),
        })?;

    // Unparsable output is treated as "no vulnerabilities".
    let result: CargoAuditOutput = serde_json::from_slice(&output.stdout)
        .unwrap_or_else(|_| CargoAuditOutput { vulnerabilities: CargoAuditVulns { list: Vec::new() } });

    let vulns = result
        .vulnerabilities
        .list
        .into_iter()
        .map(|v| AuditVuln {
            package: v.advisory.package,
            id: v.advisory.id,
            url: v.advisory.url,
        })
        .collect();

    Ok(vulns)
}
|
||||
|
||||
fn merge_audit_vulns(entries: &mut Vec<SbomEntry>, vulns: Vec<AuditVuln>) {
|
||||
for vuln in vulns {
|
||||
if let Some(entry) = entries.iter_mut().find(|e| e.name == vuln.package) {
|
||||
entry.known_vulnerabilities.push(VulnRef {
|
||||
id: vuln.id.clone(),
|
||||
source: "cargo-audit".to_string(),
|
||||
severity: None,
|
||||
url: Some(vuln.url),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CycloneDX JSON types
// Minimal deserialization targets for CycloneDX BOM JSON; only the fields
// this module reads are modeled.

/// Top-level CycloneDX BOM document.
#[derive(serde::Deserialize)]
struct CycloneDxBom {
    components: Option<Vec<CdxComponent>>,
}

/// One component (dependency) entry in the BOM.
#[derive(serde::Deserialize)]
struct CdxComponent {
    name: String,
    version: Option<String>,
    // `type` is a Rust keyword, hence the serde rename.
    #[serde(rename = "type")]
    component_type: Option<String>,
    // Package URL (purl) identifier, when present.
    purl: Option<String>,
    licenses: Option<Vec<CdxLicenseWrapper>>,
}

/// CycloneDX wraps each license choice in an object keyed by `license`.
#[derive(serde::Deserialize)]
struct CdxLicenseWrapper {
    license: Option<CdxLicense>,
}

/// License identified either by SPDX `id` or free-form `name`.
#[derive(serde::Deserialize)]
struct CdxLicense {
    id: Option<String>,
    name: Option<String>,
}
|
||||
|
||||
// Cargo audit types
// Minimal deserialization targets for `cargo audit --json` output.

#[derive(serde::Deserialize)]
struct CargoAuditOutput {
    vulnerabilities: CargoAuditVulns,
}

#[derive(serde::Deserialize)]
struct CargoAuditVulns {
    list: Vec<CargoAuditEntry>,
}

#[derive(serde::Deserialize)]
struct CargoAuditEntry {
    advisory: CargoAuditAdvisory,
}

/// Advisory metadata for one vulnerable package.
#[derive(serde::Deserialize)]
struct CargoAuditAdvisory {
    id: String,
    package: String,
    url: String,
}

/// Flattened advisory record used when merging into SBOM entries.
struct AuditVuln {
    package: String,
    id: String,
    url: String,
}
|
||||
110
compliance-agent/src/pipeline/semgrep.rs
Normal file
110
compliance-agent/src/pipeline/semgrep.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
use std::path::Path;
|
||||
|
||||
use compliance_core::models::{Finding, ScanType, Severity};
|
||||
use compliance_core::traits::{ScanOutput, Scanner};
|
||||
use compliance_core::CoreError;
|
||||
|
||||
use crate::pipeline::dedup;
|
||||
|
||||
/// SAST scanner that shells out to the `semgrep` CLI.
pub struct SemgrepScanner;
|
||||
|
||||
impl Scanner for SemgrepScanner {
|
||||
fn name(&self) -> &str {
|
||||
"semgrep"
|
||||
}
|
||||
|
||||
fn scan_type(&self) -> ScanType {
|
||||
ScanType::Sast
|
||||
}
|
||||
|
||||
async fn scan(&self, repo_path: &Path, repo_id: &str) -> Result<ScanOutput, CoreError> {
|
||||
let output = tokio::process::Command::new("semgrep")
|
||||
.args(["--config=auto", "--json", "--quiet"])
|
||||
.arg(repo_path)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| CoreError::Scanner {
|
||||
scanner: "semgrep".to_string(),
|
||||
source: Box::new(e),
|
||||
})?;
|
||||
|
||||
if !output.status.success() && output.stdout.is_empty() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
tracing::warn!("Semgrep exited with {}: {stderr}", output.status);
|
||||
return Ok(ScanOutput::default());
|
||||
}
|
||||
|
||||
let result: SemgrepOutput = serde_json::from_slice(&output.stdout)?;
|
||||
let findings = result
|
||||
.results
|
||||
.into_iter()
|
||||
.map(|r| {
|
||||
let severity = match r.extra.severity.as_str() {
|
||||
"ERROR" => Severity::High,
|
||||
"WARNING" => Severity::Medium,
|
||||
"INFO" => Severity::Low,
|
||||
_ => Severity::Info,
|
||||
};
|
||||
|
||||
let fingerprint = dedup::compute_fingerprint(&[
|
||||
repo_id,
|
||||
&r.check_id,
|
||||
&r.path,
|
||||
&r.start.line.to_string(),
|
||||
]);
|
||||
|
||||
let mut finding = Finding::new(
|
||||
repo_id.to_string(),
|
||||
fingerprint,
|
||||
"semgrep".to_string(),
|
||||
ScanType::Sast,
|
||||
r.extra.message.clone(),
|
||||
r.extra.message,
|
||||
severity,
|
||||
);
|
||||
finding.rule_id = Some(r.check_id);
|
||||
finding.file_path = Some(r.path);
|
||||
finding.line_number = Some(r.start.line);
|
||||
finding.code_snippet = Some(r.extra.lines);
|
||||
finding.cwe = r.extra.metadata.and_then(|m| {
|
||||
m.get("cwe")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string())
|
||||
});
|
||||
finding
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(ScanOutput {
|
||||
findings,
|
||||
sbom_entries: Vec::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Top-level `semgrep --json` document; only `results` is consumed.
#[derive(serde::Deserialize)]
struct SemgrepOutput {
    results: Vec<SemgrepResult>,
}

/// One Semgrep match.
#[derive(serde::Deserialize)]
struct SemgrepResult {
    check_id: String,
    path: String,
    start: SemgrepPosition,
    extra: SemgrepExtra,
}

/// Start position of a match; only the line number is used.
#[derive(serde::Deserialize)]
struct SemgrepPosition {
    line: u32,
}

/// Per-result details emitted under `extra`.
#[derive(serde::Deserialize)]
struct SemgrepExtra {
    message: String,
    severity: String,
    // Matched source text, used as the finding's code snippet.
    lines: String,
    // Free-form rule metadata (e.g. CWE tags); absent for some rules.
    #[serde(default)]
    metadata: Option<serde_json::Value>,
}
|
||||
105
compliance-agent/src/scheduler.rs
Normal file
105
compliance-agent/src/scheduler.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use mongodb::bson::doc;
|
||||
use tokio_cron_scheduler::{Job, JobScheduler};
|
||||
|
||||
use compliance_core::models::ScanTrigger;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
use crate::error::AgentError;
|
||||
|
||||
/// Start the cron scheduler with two jobs — periodic repository scans and a
/// CVE monitor — then park forever to keep the scheduler alive.
///
/// Cron expressions come from `agent.config.scan_schedule` and
/// `agent.config.cve_monitor_schedule`. This function never returns `Ok`
/// in practice: after startup it loops on an hourly sleep.
pub async fn start_scheduler(agent: &ComplianceAgent) -> Result<(), AgentError> {
    let sched = JobScheduler::new()
        .await
        .map_err(|e| AgentError::Scheduler(format!("Failed to create scheduler: {e}")))?;

    // Periodic scan job
    // The job closure must own its data, so clone the agent and schedule
    // string before moving them in; each firing clones the agent again for
    // the spawned future.
    let scan_agent = agent.clone();
    let scan_schedule = agent.config.scan_schedule.clone();
    let scan_job = Job::new_async(scan_schedule.as_str(), move |_uuid, _lock| {
        let agent = scan_agent.clone();
        Box::pin(async move {
            tracing::info!("Scheduled scan triggered");
            scan_all_repos(&agent).await;
        })
    })
    .map_err(|e| AgentError::Scheduler(format!("Failed to create scan job: {e}")))?;
    sched.add(scan_job).await
        .map_err(|e| AgentError::Scheduler(format!("Failed to add scan job: {e}")))?;

    // CVE monitor job (daily)
    let cve_agent = agent.clone();
    let cve_schedule = agent.config.cve_monitor_schedule.clone();
    let cve_job = Job::new_async(cve_schedule.as_str(), move |_uuid, _lock| {
        let agent = cve_agent.clone();
        Box::pin(async move {
            tracing::info!("CVE monitor triggered");
            monitor_cves(&agent).await;
        })
    })
    .map_err(|e| AgentError::Scheduler(format!("Failed to create CVE monitor job: {e}")))?;
    sched.add(cve_job).await
        .map_err(|e| AgentError::Scheduler(format!("Failed to add CVE monitor job: {e}")))?;

    sched.start().await
        .map_err(|e| AgentError::Scheduler(format!("Failed to start scheduler: {e}")))?;

    tracing::info!(
        "Scheduler started: scans='{}', CVE monitor='{}'",
        agent.config.scan_schedule,
        agent.config.cve_monitor_schedule,
    );

    // Keep scheduler alive
    // `sched` is owned by this frame; looping here keeps it from being
    // dropped while its jobs run.
    loop {
        tokio::time::sleep(tokio::time::Duration::from_secs(3600)).await;
    }
}
|
||||
|
||||
async fn scan_all_repos(agent: &ComplianceAgent) {
|
||||
use futures_util::StreamExt;
|
||||
|
||||
let cursor = match agent.db.repositories().find(doc! {}).await {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to list repos for scheduled scan: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let repos: Vec<_> = cursor
|
||||
.filter_map(|r| async { r.ok() })
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
for repo in repos {
|
||||
let repo_id = repo.id.map(|id| id.to_hex()).unwrap_or_default();
|
||||
if let Err(e) = agent.run_scan(&repo_id, ScanTrigger::Scheduled).await {
|
||||
tracing::error!("Scheduled scan failed for {}: {e}", repo.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn monitor_cves(agent: &ComplianceAgent) {
|
||||
use futures_util::StreamExt;
|
||||
|
||||
// Re-scan all SBOM entries for new CVEs
|
||||
let cursor = match agent.db.sbom_entries().find(doc! {}).await {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to list SBOM entries for CVE monitoring: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let entries: Vec<_> = cursor
|
||||
.filter_map(|r| async { r.ok() })
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
if entries.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
tracing::info!("CVE monitor: checking {} dependencies", entries.len());
|
||||
// The actual CVE checking is handled by the CveScanner in the pipeline
|
||||
// This is a simplified version that just logs the activity
|
||||
}
|
||||
161
compliance-agent/src/trackers/github.rs
Normal file
161
compliance-agent/src/trackers/github.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
use compliance_core::error::CoreError;
|
||||
use compliance_core::models::{TrackerIssue, TrackerType};
|
||||
use compliance_core::traits::issue_tracker::{IssueTracker, ReviewComment};
|
||||
use octocrab::Octocrab;
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
|
||||
/// Issue tracker backed by the GitHub REST API via `octocrab`.
pub struct GitHubTracker {
    client: Octocrab,
}

impl GitHubTracker {
    /// Build a tracker authenticated with a personal access token.
    ///
    /// Fails with `CoreError::IssueTracker` if the octocrab client cannot
    /// be constructed.
    pub fn new(token: &SecretString) -> Result<Self, CoreError> {
        let client = Octocrab::builder()
            .personal_token(token.expose_secret().to_string())
            .build()
            .map_err(|e| CoreError::IssueTracker(format!("Failed to create GitHub client: {e}")))?;
        Ok(Self { client })
    }
}
|
||||
|
||||
impl IssueTracker for GitHubTracker {
    fn name(&self) -> &str {
        "github"
    }

    /// Create an issue via octocrab's issues API, attaching `labels` when
    /// non-empty.
    ///
    /// The returned `TrackerIssue` has an empty first field (presumably an
    /// internal id filled in by the caller — confirm against the call site).
    async fn create_issue(
        &self,
        owner: &str,
        repo: &str,
        title: &str,
        body: &str,
        labels: &[String],
    ) -> Result<TrackerIssue, CoreError> {
        let issues_handler = self.client.issues(owner, repo);
        let mut builder = issues_handler.create(title).body(body);
        if !labels.is_empty() {
            builder = builder.labels(labels.to_vec());
        }
        let issue = builder
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("GitHub create issue failed: {e}")))?;

        Ok(TrackerIssue::new(
            String::new(),
            TrackerType::GitHub,
            issue.number.to_string(),
            issue.html_url.to_string(),
            title.to_string(),
        ))
    }

    /// Open or close an issue: "closed"/"resolved" map to GitHub's
    /// "closed" state, anything else to "open".
    ///
    /// NOTE(review): GitHub documents `PATCH /repos/{owner}/{repo}/issues/{n}`
    /// for edits; this sends POST via `client.post` — confirm GitHub accepts
    /// POST on this route, or switch to a PATCH-capable call.
    async fn update_issue_status(
        &self,
        owner: &str,
        repo: &str,
        external_id: &str,
        status: &str,
    ) -> Result<(), CoreError> {
        let issue_number: u64 = external_id
            .parse()
            .map_err(|_| CoreError::IssueTracker("Invalid issue number".to_string()))?;

        let state_str = match status {
            "closed" | "resolved" => "closed",
            _ => "open",
        };

        // Use the REST API directly for state update
        let route = format!("/repos/{owner}/{repo}/issues/{issue_number}");
        let body = serde_json::json!({ "state": state_str });
        self.client
            .post::<serde_json::Value, _>(route, Some(&body))
            .await
            .map_err(|e| CoreError::IssueTracker(format!("GitHub update issue failed: {e}")))?;

        Ok(())
    }

    /// Add a plain comment to an existing issue identified by its number.
    async fn add_comment(
        &self,
        owner: &str,
        repo: &str,
        external_id: &str,
        body: &str,
    ) -> Result<(), CoreError> {
        let issue_number: u64 = external_id
            .parse()
            .map_err(|_| CoreError::IssueTracker("Invalid issue number".to_string()))?;

        self.client
            .issues(owner, repo)
            .create_comment(issue_number, body)
            .await
            .map_err(|e| CoreError::IssueTracker(format!("GitHub add comment failed: {e}")))?;

        Ok(())
    }

    /// Submit a PR review (event COMMENT): one summary body plus optional
    /// per-line comments, via the raw reviews endpoint.
    async fn create_pr_review(
        &self,
        owner: &str,
        repo: &str,
        pr_number: u64,
        body: &str,
        comments: Vec<ReviewComment>,
    ) -> Result<(), CoreError> {
        // Shape each line comment as the reviews API expects.
        let review_comments: Vec<serde_json::Value> = comments
            .iter()
            .map(|c| {
                serde_json::json!({
                    "path": c.path,
                    "line": c.line,
                    "body": c.body,
                })
            })
            .collect();

        let review_body = serde_json::json!({
            "body": body,
            "event": "COMMENT",
            "comments": review_comments,
        });

        let route = format!("/repos/{owner}/{repo}/pulls/{pr_number}/reviews");
        self.client
            .post::<serde_json::Value, ()>(route, Some(&review_body))
            .await
            .map_err(|e| CoreError::IssueTracker(format!("GitHub PR review failed: {e}")))?;

        Ok(())
    }

    /// Search `repo` for an issue whose text contains `fingerprint`;
    /// returns the first hit, if any.
    async fn find_existing_issue(
        &self,
        owner: &str,
        repo: &str,
        fingerprint: &str,
    ) -> Result<Option<TrackerIssue>, CoreError> {
        // `is:issue` keeps pull requests out of the results.
        let query = format!("repo:{owner}/{repo} is:issue {fingerprint}");
        let results = self
            .client
            .search()
            .issues_and_pull_requests(&query)
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("GitHub search failed: {e}")))?;

        if let Some(issue) = results.items.first() {
            Ok(Some(TrackerIssue::new(
                String::new(),
                TrackerType::GitHub,
                issue.number.to_string(),
                issue.html_url.to_string(),
                issue.title.clone(),
            )))
        } else {
            Ok(None)
        }
    }
}
|
||||
201
compliance-agent/src/trackers/gitlab.rs
Normal file
201
compliance-agent/src/trackers/gitlab.rs
Normal file
@@ -0,0 +1,201 @@
|
||||
use compliance_core::error::CoreError;
|
||||
use compliance_core::models::{TrackerIssue, TrackerType};
|
||||
use compliance_core::traits::issue_tracker::{IssueTracker, ReviewComment};
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
|
||||
/// Issue tracker backed by the GitLab REST API (v4) over plain `reqwest`.
pub struct GitLabTracker {
    // Instance base URL without trailing slash, e.g. "https://gitlab.com".
    base_url: String,
    http: reqwest::Client,
    // Access token sent as the PRIVATE-TOKEN header on every request.
    token: SecretString,
}

impl GitLabTracker {
    /// Create a tracker for the GitLab instance at `base_url`.
    pub fn new(base_url: String, token: SecretString) -> Self {
        Self {
            base_url: base_url.trim_end_matches('/').to_string(),
            http: reqwest::Client::new(),
            token,
        }
    }

    /// Join an API path onto this instance's `/api/v4` prefix.
    fn api_url(&self, path: &str) -> String {
        format!("{}/api/v4{}", self.base_url, path)
    }

    /// URL-encode "owner/repo" for use as a GitLab project path parameter.
    fn project_path(owner: &str, repo: &str) -> String {
        urlencoding::encode(&format!("{owner}/{repo}")).to_string()
    }
}
|
||||
|
||||
impl IssueTracker for GitLabTracker {
|
||||
fn name(&self) -> &str {
|
||||
"gitlab"
|
||||
}
|
||||
|
||||
async fn create_issue(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
title: &str,
|
||||
body: &str,
|
||||
labels: &[String],
|
||||
) -> Result<TrackerIssue, CoreError> {
|
||||
let project = Self::project_path(owner, repo);
|
||||
let url = self.api_url(&format!("/projects/{project}/issues"));
|
||||
|
||||
let mut payload = serde_json::json!({
|
||||
"title": title,
|
||||
"description": body,
|
||||
});
|
||||
if !labels.is_empty() {
|
||||
payload["labels"] = serde_json::Value::String(labels.join(","));
|
||||
}
|
||||
|
||||
let resp = self
|
||||
.http
|
||||
.post(&url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("GitLab create issue failed: {e}")))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(CoreError::IssueTracker(format!("GitLab returned {status}: {body}")));
|
||||
}
|
||||
|
||||
let issue: serde_json::Value = resp.json().await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("Failed to parse GitLab response: {e}")))?;
|
||||
|
||||
Ok(TrackerIssue::new(
|
||||
String::new(),
|
||||
TrackerType::GitLab,
|
||||
issue["iid"].to_string(),
|
||||
issue["web_url"].as_str().unwrap_or("").to_string(),
|
||||
title.to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
async fn update_issue_status(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
external_id: &str,
|
||||
status: &str,
|
||||
) -> Result<(), CoreError> {
|
||||
let project = Self::project_path(owner, repo);
|
||||
let url = self.api_url(&format!("/projects/{project}/issues/{external_id}"));
|
||||
|
||||
let state_event = match status {
|
||||
"closed" | "resolved" => "close",
|
||||
_ => "reopen",
|
||||
};
|
||||
|
||||
self.http
|
||||
.put(&url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.json(&serde_json::json!({ "state_event": state_event }))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("GitLab update issue failed: {e}")))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn add_comment(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
external_id: &str,
|
||||
body: &str,
|
||||
) -> Result<(), CoreError> {
|
||||
let project = Self::project_path(owner, repo);
|
||||
let url = self.api_url(&format!("/projects/{project}/issues/{external_id}/notes"));
|
||||
|
||||
self.http
|
||||
.post(&url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.json(&serde_json::json!({ "body": body }))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("GitLab add comment failed: {e}")))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn create_pr_review(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
pr_number: u64,
|
||||
body: &str,
|
||||
comments: Vec<ReviewComment>,
|
||||
) -> Result<(), CoreError> {
|
||||
let project = Self::project_path(owner, repo);
|
||||
|
||||
// Post overall review as MR note
|
||||
let note_url = self.api_url(&format!("/projects/{project}/merge_requests/{pr_number}/notes"));
|
||||
self.http
|
||||
.post(¬e_url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.json(&serde_json::json!({ "body": body }))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("GitLab MR note failed: {e}")))?;
|
||||
|
||||
// Post individual line comments as MR discussions
|
||||
for comment in comments {
|
||||
let disc_url = self.api_url(&format!("/projects/{project}/merge_requests/{pr_number}/discussions"));
|
||||
let payload = serde_json::json!({
|
||||
"body": comment.body,
|
||||
"position": {
|
||||
"position_type": "text",
|
||||
"new_path": comment.path,
|
||||
"new_line": comment.line,
|
||||
}
|
||||
});
|
||||
let _ = self
|
||||
.http
|
||||
.post(&disc_url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn find_existing_issue(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
fingerprint: &str,
|
||||
) -> Result<Option<TrackerIssue>, CoreError> {
|
||||
let project = Self::project_path(owner, repo);
|
||||
let url = self.api_url(&format!("/projects/{project}/issues?search={fingerprint}"));
|
||||
|
||||
let resp = self
|
||||
.http
|
||||
.get(&url)
|
||||
.header("PRIVATE-TOKEN", self.token.expose_secret())
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| CoreError::IssueTracker(format!("GitLab search failed: {e}")))?;
|
||||
|
||||
let issues: Vec<serde_json::Value> = resp.json().await.unwrap_or_default();
|
||||
if let Some(issue) = issues.first() {
|
||||
Ok(Some(TrackerIssue::new(
|
||||
String::new(),
|
||||
TrackerType::GitLab,
|
||||
issue["iid"].to_string(),
|
||||
issue["web_url"].as_str().unwrap_or("").to_string(),
|
||||
issue["title"].as_str().unwrap_or("").to_string(),
|
||||
)))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
231
compliance-agent/src/trackers/jira.rs
Normal file
231
compliance-agent/src/trackers/jira.rs
Normal file
@@ -0,0 +1,231 @@
|
||||
use compliance_core::error::CoreError;
|
||||
use compliance_core::models::{TrackerIssue, TrackerType};
|
||||
use compliance_core::traits::issue_tracker::{IssueTracker, ReviewComment};
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
|
||||
/// Issue tracker backed by Jira's REST API v3 over plain `reqwest`.
pub struct JiraTracker {
    // Instance base URL without trailing slash, e.g. "https://org.atlassian.net".
    base_url: String,
    // Account email, paired with the API token for Basic auth.
    email: String,
    api_token: SecretString,
    // Issues are created under this project key.
    project_key: String,
    http: reqwest::Client,
}

impl JiraTracker {
    /// Build a tracker for the Jira instance at `base_url`.
    pub fn new(base_url: String, email: String, api_token: SecretString, project_key: String) -> Self {
        Self {
            base_url: base_url.trim_end_matches('/').to_string(),
            email,
            api_token,
            project_key,
            http: reqwest::Client::new(),
        }
    }

    /// `Authorization: Basic base64(email:api_token)` header value.
    fn auth_header(&self) -> String {
        use base64::Engine;
        let credentials = format!("{}:{}", self.email, self.api_token.expose_secret());
        format!("Basic {}", base64::engine::general_purpose::STANDARD.encode(credentials))
    }
}
|
||||
|
||||
impl IssueTracker for JiraTracker {
    fn name(&self) -> &str {
        "jira"
    }

    /// Create a Bug in the configured project. The plain-text `body` is
    /// wrapped in a minimal Atlassian Document Format (ADF) paragraph, as
    /// the v3 API requires. `owner`/`repo` are unused: Jira issues live
    /// under `project_key`.
    async fn create_issue(
        &self,
        _owner: &str,
        _repo: &str,
        title: &str,
        body: &str,
        labels: &[String],
    ) -> Result<TrackerIssue, CoreError> {
        let url = format!("{}/rest/api/3/issue", self.base_url);

        let mut payload = serde_json::json!({
            "fields": {
                "project": { "key": self.project_key },
                "summary": title,
                "description": {
                    "type": "doc",
                    "version": 1,
                    "content": [{
                        "type": "paragraph",
                        "content": [{
                            "type": "text",
                            "text": body,
                        }]
                    }]
                },
                "issuetype": { "name": "Bug" },
            }
        });

        if !labels.is_empty() {
            payload["fields"]["labels"] = serde_json::Value::Array(
                labels.iter().map(|l| serde_json::Value::String(l.clone())).collect(),
            );
        }

        let resp = self
            .http
            .post(&url)
            .header("Authorization", self.auth_header())
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("Jira create issue failed: {e}")))?;

        if !resp.status().is_success() {
            let status = resp.status();
            let body = resp.text().await.unwrap_or_default();
            return Err(CoreError::IssueTracker(format!("Jira returned {status}: {body}")));
        }

        let issue: serde_json::Value = resp.json().await
            .map_err(|e| CoreError::IssueTracker(format!("Failed to parse Jira response: {e}")))?;

        let key = issue["key"].as_str().unwrap_or("").to_string();
        let url = format!("{}/browse/{}", self.base_url, key);

        Ok(TrackerIssue::new(
            String::new(),
            TrackerType::Jira,
            key,
            url,
            title.to_string(),
        ))
    }

    /// Move an issue through Jira's workflow by name-matching a transition
    /// ("Done", "In Progress", or "To Do"). If no transition with the
    /// target name exists in the issue's current workflow state, this
    /// silently does nothing.
    async fn update_issue_status(
        &self,
        _owner: &str,
        _repo: &str,
        external_id: &str,
        status: &str,
    ) -> Result<(), CoreError> {
        // Get available transitions
        let url = format!("{}/rest/api/3/issue/{external_id}/transitions", self.base_url);
        let resp = self
            .http
            .get(&url)
            .header("Authorization", self.auth_header())
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("Jira get transitions failed: {e}")))?;

        // Best-effort: an unparsable response yields no transitions.
        let body: serde_json::Value = resp.json().await.unwrap_or_default();
        let transitions = body["transitions"].as_array();

        // Find matching transition
        if let Some(transitions) = transitions {
            let target = match status {
                "closed" | "resolved" => "Done",
                "in_progress" => "In Progress",
                _ => "To Do",
            };

            // Transition names are matched case-insensitively.
            if let Some(transition) = transitions.iter().find(|t| {
                t["name"].as_str().map(|n| n.eq_ignore_ascii_case(target)).unwrap_or(false)
            }) {
                let transition_id = transition["id"].as_str().unwrap_or("");
                self.http
                    .post(&format!("{}/rest/api/3/issue/{external_id}/transitions", self.base_url))
                    .header("Authorization", self.auth_header())
                    .json(&serde_json::json!({ "transition": { "id": transition_id } }))
                    .send()
                    .await
                    .map_err(|e| CoreError::IssueTracker(format!("Jira transition failed: {e}")))?;
            }
        }

        Ok(())
    }

    /// Append a comment to an issue, wrapped in a minimal ADF paragraph.
    /// NOTE(review): only transport errors are surfaced; a non-2xx
    /// response is not checked here (unlike `create_issue`) — confirm
    /// this is intended.
    async fn add_comment(
        &self,
        _owner: &str,
        _repo: &str,
        external_id: &str,
        body: &str,
    ) -> Result<(), CoreError> {
        let url = format!("{}/rest/api/3/issue/{external_id}/comment", self.base_url);

        self.http
            .post(&url)
            .header("Authorization", self.auth_header())
            .header("Content-Type", "application/json")
            .json(&serde_json::json!({
                "body": {
                    "type": "doc",
                    "version": 1,
                    "content": [{
                        "type": "paragraph",
                        "content": [{
                            "type": "text",
                            "text": body,
                        }]
                    }]
                }
            }))
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("Jira add comment failed: {e}")))?;

        Ok(())
    }

    /// Jira has no PR-review concept; this logs and succeeds as a no-op.
    async fn create_pr_review(
        &self,
        _owner: &str,
        _repo: &str,
        _pr_number: u64,
        _body: &str,
        _comments: Vec<ReviewComment>,
    ) -> Result<(), CoreError> {
        // Jira doesn't have native PR reviews - this is a no-op
        tracing::info!("Jira doesn't support PR reviews natively, skipping");
        Ok(())
    }

    /// Search the project for an issue whose text contains `fingerprint`
    /// (JQL `text ~` match, at most one result).
    /// NOTE(review): `fingerprint` is interpolated into the JQL unescaped —
    /// fine for e.g. hex fingerprints, but confirm it can never contain
    /// quotes or JQL operators.
    async fn find_existing_issue(
        &self,
        _owner: &str,
        _repo: &str,
        fingerprint: &str,
    ) -> Result<Option<TrackerIssue>, CoreError> {
        let jql = format!(
            "project = {} AND text ~ \"{}\"",
            self.project_key, fingerprint
        );
        let url = format!("{}/rest/api/3/search", self.base_url);

        let resp = self
            .http
            .get(&url)
            .header("Authorization", self.auth_header())
            .query(&[("jql", &jql), ("maxResults", &"1".to_string())])
            .send()
            .await
            .map_err(|e| CoreError::IssueTracker(format!("Jira search failed: {e}")))?;

        // Best-effort: an unparsable response is treated as "not found".
        let body: serde_json::Value = resp.json().await.unwrap_or_default();
        if let Some(issue) = body["issues"].as_array().and_then(|arr| arr.first()) {
            let key = issue["key"].as_str().unwrap_or("").to_string();
            let url = format!("{}/browse/{}", self.base_url, key);
            let title = issue["fields"]["summary"].as_str().unwrap_or("").to_string();
            Ok(Some(TrackerIssue::new(
                String::new(),
                TrackerType::Jira,
                key,
                url,
                title,
            )))
        } else {
            Ok(None)
        }
    }
}
|
||||
3
compliance-agent/src/trackers/mod.rs
Normal file
3
compliance-agent/src/trackers/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
//! Issue-tracker integrations: GitHub, GitLab, and Jira backends for the
//! shared `IssueTracker` trait.

pub mod github;
pub mod gitlab;
pub mod jira;
|
||||
130
compliance-agent/src/webhooks/github.rs
Normal file
130
compliance-agent/src/webhooks/github.rs
Normal file
@@ -0,0 +1,130 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::body::Bytes;
|
||||
use axum::extract::Extension;
|
||||
use axum::http::{HeaderMap, StatusCode};
|
||||
use hmac::{Hmac, Mac};
|
||||
use secrecy::ExposeSecret;
|
||||
use sha2::Sha256;
|
||||
|
||||
use compliance_core::models::ScanTrigger;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
|
||||
// HMAC-SHA256, matching GitHub's `x-hub-signature-256` webhook signatures.
type HmacSha256 = Hmac<Sha256>;
|
||||
|
||||
/// Axum handler for GitHub webhooks.
///
/// When `github_webhook_secret` is configured, the `x-hub-signature-256`
/// HMAC header is verified against the raw body before anything is parsed;
/// a bad signature returns 401. Only `push` and `pull_request` events are
/// acted on; all other events are acknowledged with 200.
pub async fn handle_github_webhook(
    Extension(agent): Extension<Arc<ComplianceAgent>>,
    headers: HeaderMap,
    body: Bytes,
) -> StatusCode {
    // Verify HMAC signature
    if let Some(secret) = &agent.config.github_webhook_secret {
        let signature = headers
            .get("x-hub-signature-256")
            .and_then(|v| v.to_str().ok())
            .unwrap_or("");

        if !verify_signature(secret.expose_secret(), &body, signature) {
            tracing::warn!("GitHub webhook: invalid signature");
            return StatusCode::UNAUTHORIZED;
        }
    }

    let event = headers
        .get("x-github-event")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("");

    let payload: serde_json::Value = match serde_json::from_slice(&body) {
        Ok(v) => v,
        Err(e) => {
            tracing::warn!("GitHub webhook: invalid JSON: {e}");
            return StatusCode::BAD_REQUEST;
        }
    };

    // Dispatch on the event header rather than the payload shape.
    match event {
        "push" => handle_push(agent, &payload).await,
        "pull_request" => handle_pull_request(agent, &payload).await,
        _ => {
            tracing::debug!("GitHub webhook: ignoring event '{event}'");
            StatusCode::OK
        }
    }
}
|
||||
|
||||
async fn handle_push(agent: Arc<ComplianceAgent>, payload: &serde_json::Value) -> StatusCode {
|
||||
let repo_url = payload["repository"]["clone_url"]
|
||||
.as_str()
|
||||
.or_else(|| payload["repository"]["html_url"].as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
if repo_url.is_empty() {
|
||||
return StatusCode::BAD_REQUEST;
|
||||
}
|
||||
|
||||
// Find matching tracked repository
|
||||
let repo = agent
|
||||
.db
|
||||
.repositories()
|
||||
.find_one(mongodb::bson::doc! { "git_url": repo_url })
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
if let Some(repo) = repo {
|
||||
let repo_id = repo.id.map(|id| id.to_hex()).unwrap_or_default();
|
||||
let agent_clone = (*agent).clone();
|
||||
tokio::spawn(async move {
|
||||
tracing::info!("GitHub push webhook: triggering scan for {repo_id}");
|
||||
if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
|
||||
tracing::error!("Webhook-triggered scan failed: {e}");
|
||||
}
|
||||
});
|
||||
} else {
|
||||
tracing::debug!("GitHub push webhook: no tracked repo for {repo_url}");
|
||||
}
|
||||
|
||||
StatusCode::OK
|
||||
}
|
||||
|
||||
async fn handle_pull_request(
|
||||
_agent: Arc<ComplianceAgent>,
|
||||
payload: &serde_json::Value,
|
||||
) -> StatusCode {
|
||||
let action = payload["action"].as_str().unwrap_or("");
|
||||
if action != "opened" && action != "synchronize" {
|
||||
return StatusCode::OK;
|
||||
}
|
||||
|
||||
let repo_url = payload["repository"]["clone_url"]
|
||||
.as_str()
|
||||
.unwrap_or("");
|
||||
let pr_number = payload["pull_request"]["number"].as_u64().unwrap_or(0);
|
||||
|
||||
if repo_url.is_empty() || pr_number == 0 {
|
||||
return StatusCode::BAD_REQUEST;
|
||||
}
|
||||
|
||||
tracing::info!("GitHub PR webhook: PR #{pr_number} {action} on {repo_url}");
|
||||
// PR review scan would be triggered here - runs incremental SAST on diff
|
||||
// and posts review comments via the GitHub tracker
|
||||
|
||||
StatusCode::OK
|
||||
}
|
||||
|
||||
fn verify_signature(secret: &str, body: &[u8], signature: &str) -> bool {
|
||||
let sig = signature.strip_prefix("sha256=").unwrap_or(signature);
|
||||
let sig_bytes = match hex::decode(sig) {
|
||||
Ok(b) => b,
|
||||
Err(_) => return false,
|
||||
};
|
||||
|
||||
let mut mac = match HmacSha256::new_from_slice(secret.as_bytes()) {
|
||||
Ok(m) => m,
|
||||
Err(_) => return false,
|
||||
};
|
||||
mac.update(body);
|
||||
mac.verify_slice(&sig_bytes).is_ok()
|
||||
}
|
||||
95
compliance-agent/src/webhooks/gitlab.rs
Normal file
95
compliance-agent/src/webhooks/gitlab.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::body::Bytes;
|
||||
use axum::extract::Extension;
|
||||
use axum::http::{HeaderMap, StatusCode};
|
||||
use secrecy::ExposeSecret;
|
||||
|
||||
use compliance_core::models::ScanTrigger;
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
|
||||
pub async fn handle_gitlab_webhook(
|
||||
Extension(agent): Extension<Arc<ComplianceAgent>>,
|
||||
headers: HeaderMap,
|
||||
body: Bytes,
|
||||
) -> StatusCode {
|
||||
// Verify GitLab token
|
||||
if let Some(secret) = &agent.config.gitlab_webhook_secret {
|
||||
let token = headers
|
||||
.get("x-gitlab-token")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
if token != secret.expose_secret() {
|
||||
tracing::warn!("GitLab webhook: invalid token");
|
||||
return StatusCode::UNAUTHORIZED;
|
||||
}
|
||||
}
|
||||
|
||||
let payload: serde_json::Value = match serde_json::from_slice(&body) {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
tracing::warn!("GitLab webhook: invalid JSON: {e}");
|
||||
return StatusCode::BAD_REQUEST;
|
||||
}
|
||||
};
|
||||
|
||||
let event_type = payload["object_kind"].as_str().unwrap_or("");
|
||||
|
||||
match event_type {
|
||||
"push" => handle_push(agent, &payload).await,
|
||||
"merge_request" => handle_merge_request(agent, &payload).await,
|
||||
_ => {
|
||||
tracing::debug!("GitLab webhook: ignoring event '{event_type}'");
|
||||
StatusCode::OK
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_push(agent: Arc<ComplianceAgent>, payload: &serde_json::Value) -> StatusCode {
|
||||
let repo_url = payload["project"]["git_http_url"]
|
||||
.as_str()
|
||||
.or_else(|| payload["project"]["web_url"].as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
if repo_url.is_empty() {
|
||||
return StatusCode::BAD_REQUEST;
|
||||
}
|
||||
|
||||
let repo = agent
|
||||
.db
|
||||
.repositories()
|
||||
.find_one(mongodb::bson::doc! { "git_url": repo_url })
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
if let Some(repo) = repo {
|
||||
let repo_id = repo.id.map(|id| id.to_hex()).unwrap_or_default();
|
||||
let agent_clone = (*agent).clone();
|
||||
tokio::spawn(async move {
|
||||
tracing::info!("GitLab push webhook: triggering scan for {repo_id}");
|
||||
if let Err(e) = agent_clone.run_scan(&repo_id, ScanTrigger::Webhook).await {
|
||||
tracing::error!("Webhook-triggered scan failed: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
StatusCode::OK
|
||||
}
|
||||
|
||||
async fn handle_merge_request(
|
||||
_agent: Arc<ComplianceAgent>,
|
||||
payload: &serde_json::Value,
|
||||
) -> StatusCode {
|
||||
let action = payload["object_attributes"]["action"].as_str().unwrap_or("");
|
||||
if action != "open" && action != "update" {
|
||||
return StatusCode::OK;
|
||||
}
|
||||
|
||||
let mr_iid = payload["object_attributes"]["iid"].as_u64().unwrap_or(0);
|
||||
tracing::info!("GitLab MR webhook: MR !{mr_iid} {action}");
|
||||
|
||||
StatusCode::OK
|
||||
}
|
||||
5
compliance-agent/src/webhooks/mod.rs
Normal file
5
compliance-agent/src/webhooks/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod github;
|
||||
pub mod gitlab;
|
||||
pub mod server;
|
||||
|
||||
pub use server::start_webhook_server;
|
||||
27
compliance-agent/src/webhooks/server.rs
Normal file
27
compliance-agent/src/webhooks/server.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::routing::post;
|
||||
use axum::{Extension, Router};
|
||||
|
||||
use crate::agent::ComplianceAgent;
|
||||
use crate::error::AgentError;
|
||||
use crate::webhooks::{github, gitlab};
|
||||
|
||||
pub async fn start_webhook_server(agent: &ComplianceAgent) -> Result<(), AgentError> {
|
||||
let app = Router::new()
|
||||
.route("/webhook/github", post(github::handle_github_webhook))
|
||||
.route("/webhook/gitlab", post(gitlab::handle_gitlab_webhook))
|
||||
.layer(Extension(Arc::new(agent.clone())));
|
||||
|
||||
let addr = "0.0.0.0:3002";
|
||||
let listener = tokio::net::TcpListener::bind(addr)
|
||||
.await
|
||||
.map_err(|e| AgentError::Other(format!("Failed to bind webhook server: {e}")))?;
|
||||
|
||||
tracing::info!("Webhook server listening on {addr}");
|
||||
axum::serve(listener, app)
|
||||
.await
|
||||
.map_err(|e| AgentError::Other(format!("Webhook server error: {e}")))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
18
compliance-core/Cargo.toml
Normal file
18
compliance-core/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "compliance-core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
hex = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
secrecy = { workspace = true }
|
||||
mongodb = { workspace = true }
|
||||
34
compliance-core/src/config.rs
Normal file
34
compliance-core/src/config.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use secrecy::SecretString;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Runtime configuration for the scanning agent, mirroring the environment
/// variables documented in `.env.example`. Secrets are wrapped in
/// `SecretString` so they are redacted from `Debug` output.
#[derive(Clone, Debug)]
pub struct AgentConfig {
    /// MongoDB connection string.
    pub mongodb_uri: String,
    /// Database name holding the scanner collections.
    pub mongodb_database: String,
    /// Base URL of the LiteLLM proxy used for LLM calls.
    pub litellm_url: String,
    /// API key for the LiteLLM proxy.
    pub litellm_api_key: SecretString,
    /// Model identifier passed to LiteLLM (e.g. "gpt-4o").
    pub litellm_model: String,
    /// GitHub API token; `None` disables GitHub integration.
    pub github_token: Option<SecretString>,
    /// Shared secret used to verify GitHub webhook deliveries.
    pub github_webhook_secret: Option<SecretString>,
    /// GitLab instance base URL (defaults to gitlab.com in `.env.example`).
    pub gitlab_url: Option<String>,
    /// GitLab API token; `None` disables GitLab integration.
    pub gitlab_token: Option<SecretString>,
    /// Shared secret matched against the `X-Gitlab-Token` webhook header.
    pub gitlab_webhook_secret: Option<SecretString>,
    /// Jira site URL (e.g. "https://your-org.atlassian.net").
    pub jira_url: Option<String>,
    /// Jira account email used together with the API token.
    pub jira_email: Option<String>,
    /// Jira API token.
    pub jira_api_token: Option<SecretString>,
    /// Jira project key issues are created under.
    pub jira_project_key: Option<String>,
    /// SearXNG instance URL, when web search is enabled.
    pub searxng_url: Option<String>,
    /// NVD API key, if any.
    pub nvd_api_key: Option<SecretString>,
    /// Port the agent's HTTP API listens on.
    pub agent_port: u16,
    /// Cron expression for periodic repository scans
    /// (6-field with seconds, per `.env.example`: "0 0 */6 * * *").
    pub scan_schedule: String,
    /// Cron expression for the CVE monitoring job.
    pub cve_monitor_schedule: String,
    /// Directory under which repositories are cloned for scanning.
    pub git_clone_base_path: String,
}
|
||||
|
||||
/// Configuration for the dashboard server (see `.env.example`).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DashboardConfig {
    /// MongoDB connection string.
    pub mongodb_uri: String,
    /// Database name shared with the agent.
    pub mongodb_database: String,
    /// Base URL of the agent's HTTP API.
    pub agent_api_url: String,
    /// Port the dashboard listens on.
    pub dashboard_port: u16,
}
|
||||
41
compliance-core/src/error.rs
Normal file
41
compliance-core/src/error.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Error type shared across the compliance crates; display strings come
/// from the `thiserror` attributes below.
#[derive(Error, Debug)]
pub enum CoreError {
    /// MongoDB driver failure (connection, query, BSON conversion).
    #[error("Database error: {0}")]
    Database(#[from] mongodb::error::Error),

    /// JSON (de)serialization failure.
    #[error("Serialization error: {0}")]
    Serialization(#[from] serde_json::Error),

    /// Git operation failure (clone, fetch, etc.), reported as text.
    #[error("Git error: {0}")]
    Git(String),

    /// A scanner tool failed; `scanner` names the tool, `source` carries
    /// the underlying error.
    #[error("Scanner error: {source}")]
    Scanner {
        scanner: String,
        #[source]
        source: Box<dyn std::error::Error + Send + Sync>,
    },

    /// LLM call failure.
    #[error("LLM error: {0}")]
    Llm(String),

    /// External issue-tracker (GitHub/GitLab/Jira) failure.
    #[error("Issue tracker error: {0}")]
    IssueTracker(String),

    /// Generic HTTP failure.
    #[error("HTTP error: {0}")]
    Http(String),

    /// Missing or invalid configuration.
    #[error("Configuration error: {0}")]
    Config(String),

    /// Filesystem / IO failure.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    /// Requested entity does not exist.
    #[error("Not found: {0}")]
    NotFound(String),

    /// Catch-all for errors that fit no other variant.
    #[error("{0}")]
    Other(String),
}
|
||||
7
compliance-core/src/lib.rs
Normal file
7
compliance-core/src/lib.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod config;
|
||||
pub mod error;
|
||||
pub mod models;
|
||||
pub mod traits;
|
||||
|
||||
pub use config::{AgentConfig, DashboardConfig};
|
||||
pub use error::CoreError;
|
||||
46
compliance-core/src/models/cve.rs
Normal file
46
compliance-core/src/models/cve.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Where a CVE alert was discovered.
///
/// Serialized lowercase, so `SearXNG` round-trips as "searxng".
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum CveSource {
    /// OSV.dev vulnerability database.
    Osv,
    /// NIST National Vulnerability Database.
    Nvd,
    /// Discovered via SearXNG web search.
    SearXNG,
}
|
||||
|
||||
/// A CVE affecting a dependency of a tracked repository.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CveAlert {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// CVE identifier (e.g. "CVE-2024-1234").
    pub cve_id: String,
    /// Id of the tracked repository this alert belongs to.
    pub repo_id: String,
    /// Name of the affected dependency.
    pub affected_package: String,
    /// Version of that dependency found in the repository.
    pub affected_version: String,
    /// Where the alert was discovered.
    pub source: CveSource,
    /// Severity label, when the source provides one.
    pub severity: Option<String>,
    /// CVSS score, when available.
    pub cvss_score: Option<f64>,
    /// Short description of the vulnerability.
    pub summary: Option<String>,
    /// LLM-generated assessment of the impact on this repository.
    pub llm_impact_summary: Option<String>,
    /// Reference links for the CVE.
    pub references: Vec<String>,
    /// When this alert record was created.
    pub created_at: DateTime<Utc>,
}
|
||||
|
||||
impl CveAlert {
|
||||
pub fn new(cve_id: String, repo_id: String, affected_package: String, affected_version: String, source: CveSource) -> Self {
|
||||
Self {
|
||||
id: None,
|
||||
cve_id,
|
||||
repo_id,
|
||||
affected_package,
|
||||
affected_version,
|
||||
source,
|
||||
severity: None,
|
||||
cvss_score: None,
|
||||
summary: None,
|
||||
llm_impact_summary: None,
|
||||
references: Vec::new(),
|
||||
created_at: Utc::now(),
|
||||
}
|
||||
}
|
||||
}
|
||||
115
compliance-core/src/models/finding.rs
Normal file
115
compliance-core/src/models/finding.rs
Normal file
@@ -0,0 +1,115 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::scan::ScanType;
|
||||
|
||||
/// Finding severity, from least (`Info`) to most (`Critical`) severe.
///
/// The variant order drives the derived `Ord`, so comparisons follow
/// that ranking. Serialized lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "lowercase")]
pub enum Severity {
    Info,
    Low,
    Medium,
    High,
    Critical,
}
|
||||
|
||||
impl std::fmt::Display for Severity {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Info => write!(f, "info"),
|
||||
Self::Low => write!(f, "low"),
|
||||
Self::Medium => write!(f, "medium"),
|
||||
Self::High => write!(f, "high"),
|
||||
Self::Critical => write!(f, "critical"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Triage lifecycle of a finding. Serialized snake_case
/// (e.g. `FalsePositive` -> "false_positive").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum FindingStatus {
    /// Newly reported, not yet reviewed.
    Open,
    /// Reviewed/triaged.
    Triaged,
    /// Determined not to be a real issue.
    FalsePositive,
    /// Fixed.
    Resolved,
    /// Deliberately suppressed.
    Ignored,
}
|
||||
|
||||
impl std::fmt::Display for FindingStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Open => write!(f, "open"),
|
||||
Self::Triaged => write!(f, "triaged"),
|
||||
Self::FalsePositive => write!(f, "false_positive"),
|
||||
Self::Resolved => write!(f, "resolved"),
|
||||
Self::Ignored => write!(f, "ignored"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single security/compliance finding produced by a scanner for a
/// tracked repository.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Finding {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// Id of the tracked repository the finding belongs to.
    pub repo_id: String,
    /// Stable identity used to deduplicate findings across scans.
    pub fingerprint: String,
    /// Name of the scanner that produced the finding.
    pub scanner: String,
    /// Category of the scan (SAST, SBOM, CVE, ...).
    pub scan_type: ScanType,
    /// Scanner rule id that fired, if the scanner reports one.
    pub rule_id: Option<String>,
    /// Short human-readable title.
    pub title: String,
    /// Longer description of the issue.
    pub description: String,
    /// Assessed severity.
    pub severity: Severity,
    /// Scanner/triage confidence, if available.
    pub confidence: Option<f64>,
    /// Associated CWE identifier, if any.
    pub cwe: Option<String>,
    /// Associated CVE identifier, if any.
    pub cve: Option<String>,
    /// CVSS score, if known.
    pub cvss_score: Option<f64>,
    /// File where the issue was found, if location is known.
    pub file_path: Option<String>,
    /// Line number within `file_path`, if known.
    pub line_number: Option<u32>,
    /// Relevant excerpt of the offending code, if captured.
    pub code_snippet: Option<String>,
    /// Remediation guidance, if provided.
    pub remediation: Option<String>,
    /// Concrete suggested fix (e.g. from LLM triage), if any.
    pub suggested_fix: Option<String>,
    /// Current triage status.
    pub status: FindingStatus,
    /// URL of the external tracker issue created for this finding, if any.
    pub tracker_issue_url: Option<String>,
    /// Id of the scan run that produced/last updated this finding.
    pub scan_run_id: Option<String>,
    /// When the finding was first recorded.
    pub created_at: DateTime<Utc>,
    /// When the finding was last modified.
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
impl Finding {
|
||||
pub fn new(
|
||||
repo_id: String,
|
||||
fingerprint: String,
|
||||
scanner: String,
|
||||
scan_type: ScanType,
|
||||
title: String,
|
||||
description: String,
|
||||
severity: Severity,
|
||||
) -> Self {
|
||||
let now = Utc::now();
|
||||
Self {
|
||||
id: None,
|
||||
repo_id,
|
||||
fingerprint,
|
||||
scanner,
|
||||
scan_type,
|
||||
rule_id: None,
|
||||
title,
|
||||
description,
|
||||
severity,
|
||||
confidence: None,
|
||||
cwe: None,
|
||||
cve: None,
|
||||
cvss_score: None,
|
||||
file_path: None,
|
||||
line_number: None,
|
||||
code_snippet: None,
|
||||
remediation: None,
|
||||
suggested_fix: None,
|
||||
status: FindingStatus::Open,
|
||||
tracker_issue_url: None,
|
||||
scan_run_id: None,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
77
compliance-core/src/models/issue.rs
Normal file
77
compliance-core/src/models/issue.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Which external issue tracker an issue lives in.
/// Serialized lowercase ("github", "gitlab", "jira").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum TrackerType {
    GitHub,
    GitLab,
    Jira,
}
|
||||
|
||||
impl std::fmt::Display for TrackerType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::GitHub => write!(f, "github"),
|
||||
Self::GitLab => write!(f, "gitlab"),
|
||||
Self::Jira => write!(f, "jira"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Lifecycle state of an issue in an external tracker.
/// Serialized snake_case (e.g. `InProgress` -> "in_progress").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum IssueStatus {
    Open,
    InProgress,
    Closed,
    Resolved,
}
|
||||
|
||||
impl std::fmt::Display for IssueStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Open => write!(f, "open"),
|
||||
Self::InProgress => write!(f, "in_progress"),
|
||||
Self::Closed => write!(f, "closed"),
|
||||
Self::Resolved => write!(f, "resolved"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Record linking a finding to the issue created for it in an external
/// tracker (GitHub/GitLab/Jira).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrackerIssue {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// Id of the finding this issue tracks.
    pub finding_id: String,
    /// Which tracker hosts the issue.
    pub tracker_type: TrackerType,
    /// Issue identifier in the external tracker.
    pub external_id: String,
    /// Browser URL of the issue.
    pub external_url: String,
    /// Issue title.
    pub title: String,
    /// Last known status of the issue.
    pub status: IssueStatus,
    /// When this record was created.
    pub created_at: DateTime<Utc>,
    /// When this record was last updated.
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
impl TrackerIssue {
|
||||
pub fn new(
|
||||
finding_id: String,
|
||||
tracker_type: TrackerType,
|
||||
external_id: String,
|
||||
external_url: String,
|
||||
title: String,
|
||||
) -> Self {
|
||||
let now = Utc::now();
|
||||
Self {
|
||||
id: None,
|
||||
finding_id,
|
||||
tracker_type,
|
||||
external_id,
|
||||
external_url,
|
||||
title,
|
||||
status: IssueStatus::Open,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
13
compliance-core/src/models/mod.rs
Normal file
13
compliance-core/src/models/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
pub mod cve;
|
||||
pub mod finding;
|
||||
pub mod issue;
|
||||
pub mod repository;
|
||||
pub mod sbom;
|
||||
pub mod scan;
|
||||
|
||||
pub use cve::{CveAlert, CveSource};
|
||||
pub use finding::{Finding, FindingStatus, Severity};
|
||||
pub use issue::{IssueStatus, TrackerIssue, TrackerType};
|
||||
pub use repository::{ScanTrigger, TrackedRepository};
|
||||
pub use sbom::{SbomEntry, VulnRef};
|
||||
pub use scan::{ScanPhase, ScanRun, ScanRunStatus, ScanType};
|
||||
53
compliance-core/src/models/repository.rs
Normal file
53
compliance-core/src/models/repository.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::issue::TrackerType;
|
||||
|
||||
/// What initiated a scan run. Serialized snake_case.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ScanTrigger {
    /// Fired by the cron scheduler.
    Scheduled,
    /// Fired by a repository webhook (push event).
    Webhook,
    /// Requested explicitly (e.g. via API/dashboard).
    Manual,
}
|
||||
|
||||
/// A git repository registered for compliance scanning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrackedRepository {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// Display name of the repository.
    pub name: String,
    /// Clone URL; also the lookup key used by webhook handlers.
    pub git_url: String,
    /// Branch to scan (defaults to "main" in `new`).
    pub default_branch: String,
    /// Local checkout path, once cloned.
    pub local_path: Option<String>,
    /// Per-repo cron override; `None` presumably falls back to the global
    /// schedule — confirm against the scheduler.
    pub scan_schedule: Option<String>,
    /// Whether webhook-triggered scans are enabled for this repo.
    pub webhook_enabled: bool,
    /// Tracker used for issue creation, if configured.
    pub tracker_type: Option<TrackerType>,
    /// Tracker namespace/owner (e.g. GitHub org or user).
    pub tracker_owner: Option<String>,
    /// Tracker repository/project name.
    pub tracker_repo: Option<String>,
    /// Commit SHA of the last completed scan, for change detection.
    pub last_scanned_commit: Option<String>,
    /// Cached count of findings for this repository.
    pub findings_count: u32,
    /// When the repository was registered.
    pub created_at: DateTime<Utc>,
    /// When this record was last updated.
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
impl TrackedRepository {
|
||||
pub fn new(name: String, git_url: String) -> Self {
|
||||
let now = Utc::now();
|
||||
Self {
|
||||
id: None,
|
||||
name,
|
||||
git_url,
|
||||
default_branch: "main".to_string(),
|
||||
local_path: None,
|
||||
scan_schedule: None,
|
||||
webhook_enabled: false,
|
||||
tracker_type: None,
|
||||
tracker_owner: None,
|
||||
tracker_repo: None,
|
||||
last_scanned_commit: None,
|
||||
findings_count: 0,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
43
compliance-core/src/models/sbom.rs
Normal file
43
compliance-core/src/models/sbom.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Lightweight reference to a known vulnerability attached to an SBOM entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VulnRef {
    /// Vulnerability identifier (e.g. a CVE or OSV id).
    pub id: String,
    /// Database/feed the reference came from.
    pub source: String,
    /// Severity label, if the source reports one.
    pub severity: Option<String>,
    /// Link to the advisory, if available.
    pub url: Option<String>,
}
|
||||
|
||||
/// One dependency in a repository's software bill of materials.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SbomEntry {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// Id of the tracked repository the dependency belongs to.
    pub repo_id: String,
    /// Package name.
    pub name: String,
    /// Package version in use.
    pub version: String,
    /// Ecosystem/package manager (e.g. cargo, npm).
    pub package_manager: String,
    /// Declared license, if detected.
    pub license: Option<String>,
    /// Package URL (purl) identifier, if available.
    pub purl: Option<String>,
    /// Vulnerabilities known to affect this package/version.
    pub known_vulnerabilities: Vec<VulnRef>,
    /// When the entry was first recorded.
    pub created_at: DateTime<Utc>,
    /// When the entry was last updated.
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
impl SbomEntry {
|
||||
pub fn new(repo_id: String, name: String, version: String, package_manager: String) -> Self {
|
||||
let now = Utc::now();
|
||||
Self {
|
||||
id: None,
|
||||
repo_id,
|
||||
name,
|
||||
version,
|
||||
package_manager,
|
||||
license: None,
|
||||
purl: None,
|
||||
known_vulnerabilities: Vec::new(),
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
81
compliance-core/src/models/scan.rs
Normal file
81
compliance-core/src/models/scan.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::repository::ScanTrigger;
|
||||
|
||||
/// Category of analysis a scanner performs. Serialized lowercase.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ScanType {
    /// Static application security testing.
    Sast,
    /// Software bill of materials generation.
    Sbom,
    /// Known-vulnerability (CVE) matching.
    Cve,
    /// GDPR-related pattern detection.
    Gdpr,
    /// OAuth-related pattern detection.
    OAuth,
}
|
||||
|
||||
impl std::fmt::Display for ScanType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Sast => write!(f, "sast"),
|
||||
Self::Sbom => write!(f, "sbom"),
|
||||
Self::Cve => write!(f, "cve"),
|
||||
Self::Gdpr => write!(f, "gdpr"),
|
||||
Self::OAuth => write!(f, "oauth"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Overall state of a scan run. Serialized snake_case.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ScanRunStatus {
    Running,
    Completed,
    Failed,
}
|
||||
|
||||
/// Phases of a scan run's pipeline. Serialized snake_case.
/// `ScanRun::new` starts at `ChangeDetection`; the variant order here
/// presumably mirrors execution order — confirm against the agent's
/// pipeline code.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ScanPhase {
    ChangeDetection,
    Sast,
    SbomGeneration,
    CveScanning,
    PatternScanning,
    LlmTriage,
    IssueCreation,
    Completed,
}
|
||||
|
||||
/// Record of one execution of the scanning pipeline against a repository.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScanRun {
    /// MongoDB document id; `None` until inserted.
    #[serde(rename = "_id", skip_serializing_if = "Option::is_none")]
    pub id: Option<mongodb::bson::oid::ObjectId>,
    /// Id of the repository scanned.
    pub repo_id: String,
    /// What initiated the run.
    pub trigger: ScanTrigger,
    /// Commit the run scanned, once resolved.
    pub commit_sha: Option<String>,
    /// Overall state of the run.
    pub status: ScanRunStatus,
    /// Phase currently executing.
    pub current_phase: ScanPhase,
    /// Phases already finished, in completion order.
    pub phases_completed: Vec<ScanPhase>,
    /// Number of findings newly produced by this run.
    pub new_findings_count: u32,
    /// Failure detail when `status` is `Failed`.
    pub error_message: Option<String>,
    /// When the run started.
    pub started_at: DateTime<Utc>,
    /// When the run finished; `None` while still running.
    pub completed_at: Option<DateTime<Utc>>,
}
|
||||
|
||||
impl ScanRun {
|
||||
pub fn new(repo_id: String, trigger: ScanTrigger) -> Self {
|
||||
Self {
|
||||
id: None,
|
||||
repo_id,
|
||||
trigger,
|
||||
commit_sha: None,
|
||||
status: ScanRunStatus::Running,
|
||||
current_phase: ScanPhase::ChangeDetection,
|
||||
phases_completed: Vec::new(),
|
||||
new_findings_count: 0,
|
||||
error_message: None,
|
||||
started_at: Utc::now(),
|
||||
completed_at: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
55
compliance-core/src/traits/issue_tracker.rs
Normal file
55
compliance-core/src/traits/issue_tracker.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use crate::error::CoreError;
|
||||
use crate::models::TrackerIssue;
|
||||
|
||||
/// Abstraction over external issue trackers (GitHub, GitLab, Jira).
///
/// Uses `async fn` in the trait directly; the lint is allowed because the
/// futures' missing auto-trait bounds are acceptable for this crate.
#[allow(async_fn_in_trait)]
pub trait IssueTracker: Send + Sync {
    /// Human-readable tracker name, for logs and diagnostics.
    fn name(&self) -> &str;

    /// Creates a new issue in `owner`/`repo` and returns the resulting
    /// tracker-issue record.
    async fn create_issue(
        &self,
        owner: &str,
        repo: &str,
        title: &str,
        body: &str,
        labels: &[String],
    ) -> Result<TrackerIssue, CoreError>;

    /// Moves an existing issue to `status` (a tracker-specific status
    /// string).
    async fn update_issue_status(
        &self,
        owner: &str,
        repo: &str,
        external_id: &str,
        status: &str,
    ) -> Result<(), CoreError>;

    /// Adds a comment to an existing issue.
    async fn add_comment(
        &self,
        owner: &str,
        repo: &str,
        external_id: &str,
        body: &str,
    ) -> Result<(), CoreError>;

    /// Posts a review on pull/merge request `pr_number`, with optional
    /// inline comments.
    async fn create_pr_review(
        &self,
        owner: &str,
        repo: &str,
        pr_number: u64,
        body: &str,
        comments: Vec<ReviewComment>,
    ) -> Result<(), CoreError>;

    /// Looks up an issue previously created for a finding `fingerprint`,
    /// if one exists.
    async fn find_existing_issue(
        &self,
        owner: &str,
        repo: &str,
        fingerprint: &str,
    ) -> Result<Option<TrackerIssue>, CoreError>;
}
|
||||
|
||||
/// A single inline PR-review comment anchored to a file and line.
#[derive(Debug, Clone)]
pub struct ReviewComment {
    /// Path of the file being commented on, relative to the repo root.
    pub path: String,
    /// Line number the comment attaches to (assumed 1-based — confirm
    /// against each tracker's review API).
    pub line: u32,
    /// Comment text.
    pub body: String,
}
|
||||
5
compliance-core/src/traits/mod.rs
Normal file
5
compliance-core/src/traits/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod issue_tracker;
|
||||
pub mod scanner;
|
||||
|
||||
pub use issue_tracker::IssueTracker;
|
||||
pub use scanner::{ScanOutput, Scanner};
|
||||
17
compliance-core/src/traits/scanner.rs
Normal file
17
compliance-core/src/traits/scanner.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
use std::path::Path;
|
||||
|
||||
use crate::error::CoreError;
|
||||
use crate::models::{Finding, SbomEntry, ScanType};
|
||||
|
||||
/// Aggregated result of one scanner invocation.
#[derive(Debug, Default)]
pub struct ScanOutput {
    /// Findings produced by the scanner.
    pub findings: Vec<Finding>,
    /// SBOM entries produced, for scanners that emit a dependency inventory.
    pub sbom_entries: Vec<SbomEntry>,
}
|
||||
|
||||
/// A pluggable scanner that analyzes a checked-out repository.
///
/// `async fn` in the trait is allowed deliberately (see `IssueTracker`).
#[allow(async_fn_in_trait)]
pub trait Scanner: Send + Sync {
    /// Scanner name, for logs and finding attribution.
    fn name(&self) -> &str;
    /// Category of scan this scanner performs.
    fn scan_type(&self) -> ScanType;
    /// Runs the scan against the working tree at `repo_path`, tagging
    /// results with `repo_id`.
    async fn scan(&self, repo_path: &Path, repo_id: &str) -> Result<ScanOutput, CoreError>;
}
|
||||
54
compliance-dashboard/Cargo.toml
Normal file
54
compliance-dashboard/Cargo.toml
Normal file
@@ -0,0 +1,54 @@
|
||||
[package]
|
||||
name = "compliance-dashboard"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
default-run = "compliance-dashboard"
|
||||
|
||||
[[bin]]
|
||||
name = "compliance-dashboard"
|
||||
path = "../bin/main.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
web = ["dioxus/web", "dioxus/router", "dioxus/fullstack", "dep:reqwest", "dep:web-sys"]
|
||||
server = [
|
||||
"dioxus/server",
|
||||
"dioxus/router",
|
||||
"dioxus/fullstack",
|
||||
"dep:axum",
|
||||
"dep:mongodb",
|
||||
"dep:reqwest",
|
||||
"dep:tower-http",
|
||||
"dep:secrecy",
|
||||
"dep:dotenvy",
|
||||
"dep:dioxus-cli-config",
|
||||
"dep:dioxus-fullstack",
|
||||
"dep:tokio",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
compliance-core = { workspace = true }
|
||||
dioxus = "=0.7.3"
|
||||
dioxus-free-icons = { version = "0.10", features = ["bootstrap"] }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
dioxus-logger = "0.6"
|
||||
thiserror = { workspace = true }
|
||||
|
||||
# Web-only
|
||||
reqwest = { workspace = true, optional = true }
|
||||
web-sys = { version = "0.3", optional = true }
|
||||
|
||||
# Server-only
|
||||
axum = { version = "0.8", optional = true }
|
||||
mongodb = { workspace = true, optional = true }
|
||||
tower-http = { version = "0.6", features = ["cors", "trace"], optional = true }
|
||||
secrecy = { workspace = true, optional = true }
|
||||
dotenvy = { version = "0.15", optional = true }
|
||||
tokio = { workspace = true, optional = true }
|
||||
dioxus-cli-config = { version = "=0.7.3", optional = true }
|
||||
dioxus-fullstack = { version = "=0.7.3", optional = true }
|
||||
28
compliance-dashboard/assets/favicon.svg
Normal file
28
compliance-dashboard/assets/favicon.svg
Normal file
@@ -0,0 +1,28 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 96 96">
|
||||
<defs>
|
||||
<linearGradient id="bg" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" stop-color="#0f172a"/>
|
||||
<stop offset="100%" stop-color="#1e293b"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="shield" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" stop-color="#38bdf8"/>
|
||||
<stop offset="100%" stop-color="#818cf8"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="96" height="96" rx="18" fill="url(#bg)"/>
|
||||
<!-- Shield outline -->
|
||||
<path d="M48 14 L28 26 L28 48 C28 62 37 74 48 78 C59 74 68 62 68 48 L68 26 Z"
|
||||
fill="none" stroke="url(#shield)" stroke-width="3" stroke-linejoin="round"/>
|
||||
<!-- Inner shield fill (subtle) -->
|
||||
<path d="M48 18 L31 28.5 L31 47 C31 59.5 39 70 48 74 C57 70 65 59.5 65 47 L65 28.5 Z"
|
||||
fill="url(#shield)" opacity="0.1"/>
|
||||
<!-- Magnifying glass -->
|
||||
<circle cx="45" cy="44" r="10" fill="none" stroke="#38bdf8" stroke-width="2.5"/>
|
||||
<line x1="52" y1="51" x2="60" y2="59" stroke="#38bdf8" stroke-width="2.5" stroke-linecap="round"/>
|
||||
<!-- Checkmark inside magnifier -->
|
||||
<path d="M40 44 L43.5 47.5 L50 41" fill="none" stroke="#22c55e" stroke-width="2.5"
|
||||
stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<!-- Scan lines (decorative) -->
|
||||
<line x1="34" y1="32" x2="46" y2="32" stroke="#38bdf8" stroke-width="1.5" opacity="0.4" stroke-linecap="round"/>
|
||||
<line x1="34" y1="36" x2="42" y2="36" stroke="#38bdf8" stroke-width="1.5" opacity="0.3" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
315
compliance-dashboard/assets/main.css
Normal file
315
compliance-dashboard/assets/main.css
Normal file
@@ -0,0 +1,315 @@
|
||||
:root {
|
||||
--sidebar-width: 260px;
|
||||
--header-height: 56px;
|
||||
--bg-primary: #0f172a;
|
||||
--bg-secondary: #1e293b;
|
||||
--bg-card: #1e293b;
|
||||
--text-primary: #f1f5f9;
|
||||
--text-secondary: #94a3b8;
|
||||
--accent: #38bdf8;
|
||||
--accent-hover: #7dd3fc;
|
||||
--border: #334155;
|
||||
--danger: #ef4444;
|
||||
--warning: #f59e0b;
|
||||
--success: #22c55e;
|
||||
--info: #3b82f6;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: var(--bg-primary);
|
||||
color: var(--text-primary);
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.app-shell {
|
||||
display: flex;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
width: var(--sidebar-width);
|
||||
background: var(--bg-secondary);
|
||||
border-right: 1px solid var(--border);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
z-index: 40;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
padding: 20px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.sidebar-header h1 {
|
||||
font-size: 16px;
|
||||
font-weight: 700;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.sidebar-nav {
|
||||
padding: 12px 8px;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 10px 12px;
|
||||
border-radius: 8px;
|
||||
color: var(--text-secondary);
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
transition: all 0.15s;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.nav-item:hover {
|
||||
background: rgba(56, 189, 248, 0.1);
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.nav-item.active {
|
||||
background: rgba(56, 189, 248, 0.15);
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.main-content {
|
||||
margin-left: var(--sidebar-width);
|
||||
flex: 1;
|
||||
padding: 24px 32px;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.page-header {
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
.page-header h2 {
|
||||
font-size: 24px;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.page-header p {
|
||||
color: var(--text-secondary);
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.stat-cards {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
||||
gap: 16px;
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
.stat-card {
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.stat-card .label {
|
||||
font-size: 12px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: var(--text-secondary);
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.stat-card .value {
|
||||
font-size: 28px;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.card {
|
||||
background: var(--bg-card);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 12px;
|
||||
padding: 20px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.card-header {
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
margin-bottom: 16px;
|
||||
padding-bottom: 12px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.table-wrapper {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: left;
|
||||
padding: 12px 16px;
|
||||
font-size: 12px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: var(--text-secondary);
|
||||
border-bottom: 1px solid var(--border);
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
td {
|
||||
padding: 12px 16px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
tr:hover {
|
||||
background: rgba(56, 189, 248, 0.05);
|
||||
}
|
||||
|
||||
.badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 2px 10px;
|
||||
border-radius: 9999px;
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.badge-critical { background: rgba(239, 68, 68, 0.2); color: #fca5a5; }
|
||||
.badge-high { background: rgba(249, 115, 22, 0.2); color: #fdba74; }
|
||||
.badge-medium { background: rgba(245, 158, 11, 0.2); color: #fcd34d; }
|
||||
.badge-low { background: rgba(34, 197, 94, 0.2); color: #86efac; }
|
||||
.badge-info { background: rgba(59, 130, 246, 0.2); color: #93c5fd; }
|
||||
|
||||
.btn {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 8px 16px;
|
||||
border-radius: 8px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.btn-primary {
|
||||
background: var(--accent);
|
||||
color: #0f172a;
|
||||
}
|
||||
|
||||
.btn-primary:hover {
|
||||
background: var(--accent-hover);
|
||||
}
|
||||
|
||||
.btn-ghost {
|
||||
background: transparent;
|
||||
color: var(--text-secondary);
|
||||
border: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.btn-ghost:hover {
|
||||
color: var(--text-primary);
|
||||
border-color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.code-block {
|
||||
background: #0d1117;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 16px;
|
||||
font-family: "JetBrains Mono", "Fira Code", monospace;
|
||||
font-size: 13px;
|
||||
line-height: 1.6;
|
||||
overflow-x: auto;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.pagination {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
margin-top: 16px;
|
||||
}
|
||||
|
||||
.filter-bar {
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
margin-bottom: 16px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.filter-bar select,
|
||||
.filter-bar input {
|
||||
background: var(--bg-secondary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 8px 12px;
|
||||
color: var(--text-primary);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.form-group label {
|
||||
display: block;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
margin-bottom: 6px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.form-group input,
|
||||
.form-group select {
|
||||
width: 100%;
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 10px 14px;
|
||||
color: var(--text-primary);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 40px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.sidebar {
|
||||
transform: translateX(-100%);
|
||||
transition: transform 0.3s;
|
||||
}
|
||||
.sidebar.open {
|
||||
transform: translateX(0);
|
||||
}
|
||||
.main-content {
|
||||
margin-left: 0;
|
||||
padding: 16px;
|
||||
}
|
||||
}
|
||||
1
compliance-dashboard/assets/tailwind.css
Normal file
1
compliance-dashboard/assets/tailwind.css
Normal file
@@ -0,0 +1 @@
|
||||
/* Placeholder - generated by build.rs via bunx @tailwindcss/cli */
|
||||
38
compliance-dashboard/src/app.rs
Normal file
38
compliance-dashboard/src/app.rs
Normal file
@@ -0,0 +1,38 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::app_shell::AppShell;
|
||||
use crate::pages::*;
|
||||
|
||||
#[derive(Debug, Clone, Routable, PartialEq)]
|
||||
#[rustfmt::skip]
|
||||
pub enum Route {
|
||||
#[layout(AppShell)]
|
||||
#[route("/")]
|
||||
OverviewPage {},
|
||||
#[route("/repositories")]
|
||||
RepositoriesPage {},
|
||||
#[route("/findings")]
|
||||
FindingsPage {},
|
||||
#[route("/findings/:id")]
|
||||
FindingDetailPage { id: String },
|
||||
#[route("/sbom")]
|
||||
SbomPage {},
|
||||
#[route("/issues")]
|
||||
IssuesPage {},
|
||||
#[route("/settings")]
|
||||
SettingsPage {},
|
||||
}
|
||||
|
||||
const FAVICON: Asset = asset!("/assets/favicon.svg");
|
||||
const MAIN_CSS: Asset = asset!("/assets/main.css");
|
||||
const TAILWIND_CSS: Asset = asset!("/assets/tailwind.css");
|
||||
|
||||
#[component]
|
||||
pub fn App() -> Element {
|
||||
rsx! {
|
||||
document::Link { rel: "icon", href: FAVICON }
|
||||
document::Link { rel: "stylesheet", href: TAILWIND_CSS }
|
||||
document::Link { rel: "stylesheet", href: MAIN_CSS }
|
||||
Router::<Route> {}
|
||||
}
|
||||
}
|
||||
16
compliance-dashboard/src/components/app_shell.rs
Normal file
16
compliance-dashboard/src/components/app_shell.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::app::Route;
|
||||
use crate::components::sidebar::Sidebar;
|
||||
|
||||
#[component]
|
||||
pub fn AppShell() -> Element {
|
||||
rsx! {
|
||||
div { class: "app-shell",
|
||||
Sidebar {}
|
||||
main { class: "main-content",
|
||||
Outlet::<Route> {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
23
compliance-dashboard/src/components/code_snippet.rs
Normal file
23
compliance-dashboard/src/components/code_snippet.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
#[component]
|
||||
pub fn CodeSnippet(
|
||||
code: String,
|
||||
#[props(default)] file_path: String,
|
||||
#[props(default)] line_number: u32,
|
||||
) -> Element {
|
||||
rsx! {
|
||||
div {
|
||||
if !file_path.is_empty() {
|
||||
div {
|
||||
style: "font-size: 12px; color: var(--text-secondary); margin-bottom: 4px; font-family: monospace;",
|
||||
"{file_path}"
|
||||
if line_number > 0 {
|
||||
":{line_number}"
|
||||
}
|
||||
}
|
||||
}
|
||||
pre { class: "code-block", "{code}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
7
compliance-dashboard/src/components/mod.rs
Normal file
7
compliance-dashboard/src/components/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod app_shell;
|
||||
pub mod code_snippet;
|
||||
pub mod page_header;
|
||||
pub mod pagination;
|
||||
pub mod severity_badge;
|
||||
pub mod sidebar;
|
||||
pub mod stat_card;
|
||||
13
compliance-dashboard/src/components/page_header.rs
Normal file
13
compliance-dashboard/src/components/page_header.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
#[component]
|
||||
pub fn PageHeader(title: String, #[props(default)] description: String) -> Element {
|
||||
rsx! {
|
||||
div { class: "page-header",
|
||||
h2 { "{title}" }
|
||||
if !description.is_empty() {
|
||||
p { "{description}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
33
compliance-dashboard/src/components/pagination.rs
Normal file
33
compliance-dashboard/src/components/pagination.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
#[component]
|
||||
pub fn Pagination(
|
||||
current_page: u64,
|
||||
total_pages: u64,
|
||||
on_page_change: EventHandler<u64>,
|
||||
) -> Element {
|
||||
if total_pages <= 1 {
|
||||
return rsx! {};
|
||||
}
|
||||
|
||||
rsx! {
|
||||
div { class: "pagination",
|
||||
button {
|
||||
class: "btn btn-ghost",
|
||||
disabled: current_page <= 1,
|
||||
onclick: move |_| on_page_change.call(current_page.saturating_sub(1)),
|
||||
"Previous"
|
||||
}
|
||||
span {
|
||||
style: "color: var(--text-secondary); font-size: 14px;",
|
||||
"Page {current_page} of {total_pages}"
|
||||
}
|
||||
button {
|
||||
class: "btn btn-ghost",
|
||||
disabled: current_page >= total_pages,
|
||||
onclick: move |_| on_page_change.call(current_page + 1),
|
||||
"Next"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
16
compliance-dashboard/src/components/severity_badge.rs
Normal file
16
compliance-dashboard/src/components/severity_badge.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
#[component]
|
||||
pub fn SeverityBadge(severity: String) -> Element {
|
||||
let class = match severity.to_lowercase().as_str() {
|
||||
"critical" => "badge badge-critical",
|
||||
"high" => "badge badge-high",
|
||||
"medium" => "badge badge-medium",
|
||||
"low" => "badge badge-low",
|
||||
_ => "badge badge-info",
|
||||
};
|
||||
|
||||
rsx! {
|
||||
span { class: class, "{severity}" }
|
||||
}
|
||||
}
|
||||
81
compliance-dashboard/src/components/sidebar.rs
Normal file
81
compliance-dashboard/src/components/sidebar.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use dioxus::prelude::*;
|
||||
use dioxus_free_icons::icons::bs_icons::*;
|
||||
use dioxus_free_icons::Icon;
|
||||
|
||||
use crate::app::Route;
|
||||
|
||||
struct NavItem {
|
||||
label: &'static str,
|
||||
route: Route,
|
||||
icon: Element,
|
||||
}
|
||||
|
||||
#[component]
|
||||
pub fn Sidebar() -> Element {
|
||||
let current_route = use_route::<Route>();
|
||||
|
||||
let nav_items = [
|
||||
NavItem {
|
||||
label: "Overview",
|
||||
route: Route::OverviewPage {},
|
||||
icon: rsx! { Icon { icon: BsSpeedometer2, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "Repositories",
|
||||
route: Route::RepositoriesPage {},
|
||||
icon: rsx! { Icon { icon: BsFolder2Open, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "Findings",
|
||||
route: Route::FindingsPage {},
|
||||
icon: rsx! { Icon { icon: BsShieldExclamation, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "SBOM",
|
||||
route: Route::SbomPage {},
|
||||
icon: rsx! { Icon { icon: BsBoxSeam, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "Issues",
|
||||
route: Route::IssuesPage {},
|
||||
icon: rsx! { Icon { icon: BsListTask, width: 18, height: 18 } },
|
||||
},
|
||||
NavItem {
|
||||
label: "Settings",
|
||||
route: Route::SettingsPage {},
|
||||
icon: rsx! { Icon { icon: BsGear, width: 18, height: 18 } },
|
||||
},
|
||||
];
|
||||
|
||||
rsx! {
|
||||
nav { class: "sidebar",
|
||||
div { class: "sidebar-header",
|
||||
Icon { icon: BsShieldCheck, width: 24, height: 24 }
|
||||
h1 { "Compliance Scanner" }
|
||||
}
|
||||
div { class: "sidebar-nav",
|
||||
for item in nav_items {
|
||||
{
|
||||
let is_active = match (¤t_route, &item.route) {
|
||||
(Route::FindingDetailPage { .. }, Route::FindingsPage {}) => true,
|
||||
(a, b) => a == b,
|
||||
};
|
||||
let class = if is_active { "nav-item active" } else { "nav-item" };
|
||||
rsx! {
|
||||
Link {
|
||||
to: item.route.clone(),
|
||||
class: class,
|
||||
{item.icon}
|
||||
span { "{item.label}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
div {
|
||||
style: "padding: 16px; border-top: 1px solid var(--border); font-size: 12px; color: var(--text-secondary);",
|
||||
"v0.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
21
compliance-dashboard/src/components/stat_card.rs
Normal file
21
compliance-dashboard/src/components/stat_card.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
#[component]
|
||||
pub fn StatCard(
|
||||
label: String,
|
||||
value: String,
|
||||
#[props(default)] color: String,
|
||||
) -> Element {
|
||||
let value_style = if color.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("color: {color}")
|
||||
};
|
||||
|
||||
rsx! {
|
||||
div { class: "stat-card",
|
||||
div { class: "label", "{label}" }
|
||||
div { class: "value", style: value_style, "{value}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
18
compliance-dashboard/src/infrastructure/config.rs
Normal file
18
compliance-dashboard/src/infrastructure/config.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use compliance_core::DashboardConfig;
|
||||
|
||||
use super::error::DashboardError;
|
||||
|
||||
pub fn load_config() -> Result<DashboardConfig, DashboardError> {
|
||||
Ok(DashboardConfig {
|
||||
mongodb_uri: std::env::var("MONGODB_URI")
|
||||
.map_err(|_| DashboardError::Config("Missing MONGODB_URI".to_string()))?,
|
||||
mongodb_database: std::env::var("MONGODB_DATABASE")
|
||||
.unwrap_or_else(|_| "compliance_scanner".to_string()),
|
||||
agent_api_url: std::env::var("AGENT_API_URL")
|
||||
.unwrap_or_else(|_| "http://localhost:3001".to_string()),
|
||||
dashboard_port: std::env::var("DASHBOARD_PORT")
|
||||
.ok()
|
||||
.and_then(|p| p.parse().ok())
|
||||
.unwrap_or(8080),
|
||||
})
|
||||
}
|
||||
45
compliance-dashboard/src/infrastructure/database.rs
Normal file
45
compliance-dashboard/src/infrastructure/database.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use mongodb::bson::doc;
|
||||
use mongodb::{Client, Collection};
|
||||
|
||||
use compliance_core::models::*;
|
||||
|
||||
use super::error::DashboardError;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Database {
|
||||
inner: mongodb::Database,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn connect(uri: &str, db_name: &str) -> Result<Self, DashboardError> {
|
||||
let client = Client::with_uri_str(uri).await?;
|
||||
let db = client.database(db_name);
|
||||
db.run_command(doc! { "ping": 1 }).await?;
|
||||
tracing::info!("Dashboard connected to MongoDB '{db_name}'");
|
||||
Ok(Self { inner: db })
|
||||
}
|
||||
|
||||
pub fn repositories(&self) -> Collection<TrackedRepository> {
|
||||
self.inner.collection("repositories")
|
||||
}
|
||||
|
||||
pub fn findings(&self) -> Collection<Finding> {
|
||||
self.inner.collection("findings")
|
||||
}
|
||||
|
||||
pub fn scan_runs(&self) -> Collection<ScanRun> {
|
||||
self.inner.collection("scan_runs")
|
||||
}
|
||||
|
||||
pub fn sbom_entries(&self) -> Collection<SbomEntry> {
|
||||
self.inner.collection("sbom_entries")
|
||||
}
|
||||
|
||||
pub fn cve_alerts(&self) -> Collection<CveAlert> {
|
||||
self.inner.collection("cve_alerts")
|
||||
}
|
||||
|
||||
pub fn tracker_issues(&self) -> Collection<TrackerIssue> {
|
||||
self.inner.collection("tracker_issues")
|
||||
}
|
||||
}
|
||||
26
compliance-dashboard/src/infrastructure/error.rs
Normal file
26
compliance-dashboard/src/infrastructure/error.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use dioxus::prelude::*;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DashboardError {
|
||||
#[error("Database error: {0}")]
|
||||
Database(#[from] mongodb::error::Error),
|
||||
|
||||
#[error("HTTP error: {0}")]
|
||||
Http(#[from] reqwest::Error),
|
||||
|
||||
#[error("JSON error: {0}")]
|
||||
Json(#[from] serde_json::Error),
|
||||
|
||||
#[error("Configuration error: {0}")]
|
||||
Config(String),
|
||||
|
||||
#[error("{0}")]
|
||||
Other(String),
|
||||
}
|
||||
|
||||
impl From<DashboardError> for ServerFnError {
|
||||
fn from(err: DashboardError) -> Self {
|
||||
ServerFnError::new(err.to_string())
|
||||
}
|
||||
}
|
||||
71
compliance-dashboard/src/infrastructure/findings.rs
Normal file
71
compliance-dashboard/src/infrastructure/findings.rs
Normal file
@@ -0,0 +1,71 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::Finding;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct FindingsListResponse {
|
||||
pub data: Vec<Finding>,
|
||||
pub total: Option<u64>,
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_findings(
|
||||
page: u64,
|
||||
severity: String,
|
||||
scan_type: String,
|
||||
status: String,
|
||||
repo_id: String,
|
||||
) -> Result<FindingsListResponse, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
|
||||
let mut url = format!("{}/api/v1/findings?page={page}&limit=20", state.agent_api_url);
|
||||
if !severity.is_empty() {
|
||||
url.push_str(&format!("&severity={severity}"));
|
||||
}
|
||||
if !scan_type.is_empty() {
|
||||
url.push_str(&format!("&scan_type={scan_type}"));
|
||||
}
|
||||
if !status.is_empty() {
|
||||
url.push_str(&format!("&status={status}"));
|
||||
}
|
||||
if !repo_id.is_empty() {
|
||||
url.push_str(&format!("&repo_id={repo_id}"));
|
||||
}
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: FindingsListResponse = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(body)
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_finding_detail(id: String) -> Result<Finding, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/findings/{id}", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: serde_json::Value = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let finding: Finding = serde_json::from_value(body["data"].clone())
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(finding)
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn update_finding_status(id: String, status: String) -> Result<(), ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/findings/{id}/status", state.agent_api_url);
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
client
|
||||
.patch(&url)
|
||||
.json(&serde_json::json!({ "status": status }))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
22
compliance-dashboard/src/infrastructure/issues.rs
Normal file
22
compliance-dashboard/src/infrastructure/issues.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::TrackerIssue;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct IssuesListResponse {
|
||||
pub data: Vec<TrackerIssue>,
|
||||
pub total: Option<u64>,
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_issues(page: u64) -> Result<IssuesListResponse, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/issues?page={page}&limit=20", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: IssuesListResponse = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(body)
|
||||
}
|
||||
13
compliance-dashboard/src/infrastructure/mod.rs
Normal file
13
compliance-dashboard/src/infrastructure/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
pub mod config;
|
||||
pub mod database;
|
||||
pub mod error;
|
||||
pub mod findings;
|
||||
pub mod issues;
|
||||
pub mod repositories;
|
||||
pub mod sbom;
|
||||
pub mod scans;
|
||||
pub mod server;
|
||||
pub mod server_state;
|
||||
pub mod stats;
|
||||
|
||||
pub use server::server_start;
|
||||
64
compliance-dashboard/src/infrastructure/repositories.rs
Normal file
64
compliance-dashboard/src/infrastructure/repositories.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::TrackedRepository;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct RepositoryListResponse {
|
||||
pub data: Vec<TrackedRepository>,
|
||||
pub total: Option<u64>,
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_repositories(page: u64) -> Result<RepositoryListResponse, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/repositories?page={page}&limit=20", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: RepositoryListResponse = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(body)
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn add_repository(name: String, git_url: String, default_branch: String) -> Result<(), ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/repositories", state.agent_api_url);
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
let resp = client
|
||||
.post(&url)
|
||||
.json(&serde_json::json!({
|
||||
"name": name,
|
||||
"git_url": git_url,
|
||||
"default_branch": default_branch,
|
||||
}))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(ServerFnError::new(format!("Failed to add repository: {body}")));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn trigger_repo_scan(repo_id: String) -> Result<(), ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/repositories/{repo_id}/scan", state.agent_api_url);
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
client
|
||||
.post(&url)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
22
compliance-dashboard/src/infrastructure/sbom.rs
Normal file
22
compliance-dashboard/src/infrastructure/sbom.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::SbomEntry;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct SbomListResponse {
|
||||
pub data: Vec<SbomEntry>,
|
||||
pub total: Option<u64>,
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_sbom(page: u64) -> Result<SbomListResponse, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/sbom?page={page}&limit=50", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: SbomListResponse = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(body)
|
||||
}
|
||||
22
compliance-dashboard/src/infrastructure/scans.rs
Normal file
22
compliance-dashboard/src/infrastructure/scans.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use compliance_core::models::ScanRun;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct ScansListResponse {
|
||||
pub data: Vec<ScanRun>,
|
||||
pub total: Option<u64>,
|
||||
pub page: Option<u64>,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_scan_runs(page: u64) -> Result<ScansListResponse, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/scan-runs?page={page}&limit=20", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: ScansListResponse = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
Ok(body)
|
||||
}
|
||||
41
compliance-dashboard/src/infrastructure/server.rs
Normal file
41
compliance-dashboard/src/infrastructure/server.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use super::config;
|
||||
use super::database::Database;
|
||||
use super::error::DashboardError;
|
||||
use super::server_state::{ServerState, ServerStateInner};
|
||||
|
||||
pub fn server_start(app: fn() -> Element) -> Result<(), DashboardError> {
|
||||
tokio::runtime::Runtime::new()
|
||||
.map_err(|e| DashboardError::Other(e.to_string()))?
|
||||
.block_on(async move {
|
||||
dotenvy::dotenv().ok();
|
||||
|
||||
let config = config::load_config()?;
|
||||
let db = Database::connect(&config.mongodb_uri, &config.mongodb_database).await?;
|
||||
|
||||
let server_state: ServerState = ServerStateInner {
|
||||
agent_api_url: config.agent_api_url.clone(),
|
||||
db,
|
||||
config,
|
||||
}
|
||||
.into();
|
||||
|
||||
let addr = dioxus_cli_config::fullstack_address_or_localhost();
|
||||
let listener = tokio::net::TcpListener::bind(addr)
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("Failed to bind: {e}")))?;
|
||||
|
||||
tracing::info!("Dashboard server listening on {addr}");
|
||||
|
||||
let router = axum::Router::new()
|
||||
.serve_dioxus_application(ServeConfig::new(), app)
|
||||
.layer(axum::Extension(server_state));
|
||||
|
||||
axum::serve(listener, router.into_make_service())
|
||||
.await
|
||||
.map_err(|e| DashboardError::Other(format!("Server error: {e}")))?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
46
compliance-dashboard/src/infrastructure/server_state.rs
Normal file
46
compliance-dashboard/src/infrastructure/server_state.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use compliance_core::DashboardConfig;
|
||||
|
||||
use super::database::Database;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ServerState(Arc<ServerStateInner>);
|
||||
|
||||
impl Deref for ServerState {
|
||||
type Target = ServerStateInner;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ServerStateInner {
|
||||
pub db: Database,
|
||||
pub config: DashboardConfig,
|
||||
pub agent_api_url: String,
|
||||
}
|
||||
|
||||
impl From<ServerStateInner> for ServerState {
|
||||
fn from(inner: ServerStateInner) -> Self {
|
||||
Self(Arc::new(inner))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> axum::extract::FromRequestParts<S> for ServerState
|
||||
where
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = axum::http::StatusCode;
|
||||
|
||||
async fn from_request_parts(
|
||||
parts: &mut axum::http::request::Parts,
|
||||
_state: &S,
|
||||
) -> Result<Self, Self::Rejection> {
|
||||
parts
|
||||
.extensions
|
||||
.get::<ServerState>()
|
||||
.cloned()
|
||||
.ok_or(axum::http::StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
27
compliance-dashboard/src/infrastructure/stats.rs
Normal file
27
compliance-dashboard/src/infrastructure/stats.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use dioxus::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct OverviewStats {
|
||||
pub total_repositories: u64,
|
||||
pub total_findings: u64,
|
||||
pub critical_findings: u64,
|
||||
pub high_findings: u64,
|
||||
pub medium_findings: u64,
|
||||
pub low_findings: u64,
|
||||
pub total_sbom_entries: u64,
|
||||
pub total_cve_alerts: u64,
|
||||
pub total_issues: u64,
|
||||
}
|
||||
|
||||
#[server]
|
||||
pub async fn fetch_overview_stats() -> Result<OverviewStats, ServerFnError> {
|
||||
let state: super::server_state::ServerState =
|
||||
dioxus_fullstack::FullstackContext::extract().await?;
|
||||
let url = format!("{}/api/v1/stats/overview", state.agent_api_url);
|
||||
|
||||
let resp = reqwest::get(&url).await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let body: serde_json::Value = resp.json().await.map_err(|e| ServerFnError::new(e.to_string()))?;
|
||||
let stats: OverviewStats = serde_json::from_value(body["data"].clone()).unwrap_or_default();
|
||||
Ok(stats)
|
||||
}
|
||||
8
compliance-dashboard/src/lib.rs
Normal file
8
compliance-dashboard/src/lib.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
pub mod app;
|
||||
pub mod components;
|
||||
pub mod pages;
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
pub mod infrastructure;
|
||||
|
||||
pub use app::App;
|
||||
117
compliance-dashboard/src/pages/finding_detail.rs
Normal file
117
compliance-dashboard/src/pages/finding_detail.rs
Normal file
@@ -0,0 +1,117 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::code_snippet::CodeSnippet;
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::severity_badge::SeverityBadge;
|
||||
|
||||
#[component]
|
||||
pub fn FindingDetailPage(id: String) -> Element {
|
||||
let finding_id = id.clone();
|
||||
|
||||
let finding = use_resource(move || {
|
||||
let fid = finding_id.clone();
|
||||
async move {
|
||||
crate::infrastructure::findings::fetch_finding_detail(fid).await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
let snapshot = finding.read().clone();
|
||||
|
||||
match snapshot {
|
||||
Some(Some(f)) => {
|
||||
let finding_id_for_status = id.clone();
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: f.title.clone(),
|
||||
description: format!("{} | {} | {}", f.scanner, f.scan_type, f.status),
|
||||
}
|
||||
|
||||
div { style: "display: flex; gap: 8px; margin-bottom: 16px;",
|
||||
SeverityBadge { severity: f.severity.to_string() }
|
||||
if let Some(cwe) = &f.cwe {
|
||||
span { class: "badge badge-info", "{cwe}" }
|
||||
}
|
||||
if let Some(cve) = &f.cve {
|
||||
span { class: "badge badge-high", "{cve}" }
|
||||
}
|
||||
if let Some(score) = f.cvss_score {
|
||||
span { class: "badge badge-medium", "CVSS: {score}" }
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Description" }
|
||||
p { style: "line-height: 1.6;", "{f.description}" }
|
||||
}
|
||||
|
||||
if let Some(code) = &f.code_snippet {
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Code Evidence" }
|
||||
CodeSnippet {
|
||||
code: code.clone(),
|
||||
file_path: f.file_path.clone().unwrap_or_default(),
|
||||
line_number: f.line_number.unwrap_or(0),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(remediation) = &f.remediation {
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Remediation" }
|
||||
p { style: "line-height: 1.6;", "{remediation}" }
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(fix) = &f.suggested_fix {
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Suggested Fix" }
|
||||
CodeSnippet { code: fix.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(url) = &f.tracker_issue_url {
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Linked Issue" }
|
||||
a {
|
||||
href: "{url}",
|
||||
target: "_blank",
|
||||
style: "color: var(--accent);",
|
||||
"{url}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Update Status" }
|
||||
div { style: "display: flex; gap: 8px;",
|
||||
for status in ["open", "triaged", "resolved", "false_positive", "ignored"] {
|
||||
{
|
||||
let status_str = status.to_string();
|
||||
let id_clone = finding_id_for_status.clone();
|
||||
rsx! {
|
||||
button {
|
||||
class: "btn btn-ghost",
|
||||
onclick: move |_| {
|
||||
let s = status_str.clone();
|
||||
let id = id_clone.clone();
|
||||
spawn(async move {
|
||||
let _ = crate::infrastructure::findings::update_finding_status(id, s).await;
|
||||
});
|
||||
},
|
||||
"{status}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card", p { "Finding not found." } }
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading finding..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
124
compliance-dashboard/src/pages/findings.rs
Normal file
124
compliance-dashboard/src/pages/findings.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::app::Route;
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::pagination::Pagination;
|
||||
use crate::components::severity_badge::SeverityBadge;
|
||||
|
||||
#[component]
|
||||
pub fn FindingsPage() -> Element {
|
||||
let mut page = use_signal(|| 1u64);
|
||||
let mut severity_filter = use_signal(String::new);
|
||||
let mut type_filter = use_signal(String::new);
|
||||
let mut status_filter = use_signal(String::new);
|
||||
|
||||
let findings = use_resource(move || {
|
||||
let p = page();
|
||||
let sev = severity_filter();
|
||||
let typ = type_filter();
|
||||
let stat = status_filter();
|
||||
async move {
|
||||
crate::infrastructure::findings::fetch_findings(p, sev, typ, stat, String::new()).await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "Findings",
|
||||
description: "Security and compliance findings across all repositories",
|
||||
}
|
||||
|
||||
div { class: "filter-bar",
|
||||
select {
|
||||
onchange: move |e| { severity_filter.set(e.value()); page.set(1); },
|
||||
option { value: "", "All Severities" }
|
||||
option { value: "critical", "Critical" }
|
||||
option { value: "high", "High" }
|
||||
option { value: "medium", "Medium" }
|
||||
option { value: "low", "Low" }
|
||||
option { value: "info", "Info" }
|
||||
}
|
||||
select {
|
||||
onchange: move |e| { type_filter.set(e.value()); page.set(1); },
|
||||
option { value: "", "All Types" }
|
||||
option { value: "sast", "SAST" }
|
||||
option { value: "sbom", "SBOM" }
|
||||
option { value: "cve", "CVE" }
|
||||
option { value: "gdpr", "GDPR" }
|
||||
option { value: "oauth", "OAuth" }
|
||||
}
|
||||
select {
|
||||
onchange: move |e| { status_filter.set(e.value()); page.set(1); },
|
||||
option { value: "", "All Statuses" }
|
||||
option { value: "open", "Open" }
|
||||
option { value: "triaged", "Triaged" }
|
||||
option { value: "resolved", "Resolved" }
|
||||
option { value: "false_positive", "False Positive" }
|
||||
option { value: "ignored", "Ignored" }
|
||||
}
|
||||
}
|
||||
|
||||
match &*findings.read() {
|
||||
Some(Some(resp)) => {
|
||||
let total_pages = resp.total.unwrap_or(0).div_ceil(20).max(1);
|
||||
rsx! {
|
||||
div { class: "card",
|
||||
div { class: "table-wrapper",
|
||||
table {
|
||||
thead {
|
||||
tr {
|
||||
th { "Severity" }
|
||||
th { "Title" }
|
||||
th { "Type" }
|
||||
th { "Scanner" }
|
||||
th { "File" }
|
||||
th { "Status" }
|
||||
}
|
||||
}
|
||||
tbody {
|
||||
for finding in &resp.data {
|
||||
{
|
||||
let id = finding.id.as_ref().map(|id| id.to_hex()).unwrap_or_default();
|
||||
rsx! {
|
||||
tr {
|
||||
td { SeverityBadge { severity: finding.severity.to_string() } }
|
||||
td {
|
||||
Link {
|
||||
to: Route::FindingDetailPage { id: id },
|
||||
style: "color: var(--accent); text-decoration: none;",
|
||||
"{finding.title}"
|
||||
}
|
||||
}
|
||||
td { "{finding.scan_type}" }
|
||||
td { "{finding.scanner}" }
|
||||
td {
|
||||
style: "font-family: monospace; font-size: 12px;",
|
||||
"{finding.file_path.as_deref().unwrap_or(\"-\")}"
|
||||
}
|
||||
td {
|
||||
span { class: "badge badge-info", "{finding.status}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Pagination {
|
||||
current_page: page(),
|
||||
total_pages: total_pages,
|
||||
on_page_change: move |p| page.set(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card", p { "Failed to load findings." } }
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading findings..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
87
compliance-dashboard/src/pages/issues.rs
Normal file
87
compliance-dashboard/src/pages/issues.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::pagination::Pagination;
|
||||
|
||||
#[component]
|
||||
pub fn IssuesPage() -> Element {
|
||||
let mut page = use_signal(|| 1u64);
|
||||
|
||||
let issues = use_resource(move || {
|
||||
let p = page();
|
||||
async move {
|
||||
crate::infrastructure::issues::fetch_issues(p).await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "Issues",
|
||||
description: "Cross-tracker issue view - GitHub, GitLab, and Jira",
|
||||
}
|
||||
|
||||
match &*issues.read() {
|
||||
Some(Some(resp)) => {
|
||||
let total_pages = resp.total.unwrap_or(0).div_ceil(20).max(1);
|
||||
rsx! {
|
||||
div { class: "card",
|
||||
div { class: "table-wrapper",
|
||||
table {
|
||||
thead {
|
||||
tr {
|
||||
th { "Tracker" }
|
||||
th { "ID" }
|
||||
th { "Title" }
|
||||
th { "Status" }
|
||||
th { "Created" }
|
||||
th { "Link" }
|
||||
}
|
||||
}
|
||||
tbody {
|
||||
for issue in &resp.data {
|
||||
tr {
|
||||
td {
|
||||
span { class: "badge badge-info", "{issue.tracker_type}" }
|
||||
}
|
||||
td {
|
||||
style: "font-family: monospace;",
|
||||
"{issue.external_id}"
|
||||
}
|
||||
td { "{issue.title}" }
|
||||
td {
|
||||
span { class: "badge badge-info", "{issue.status}" }
|
||||
}
|
||||
td {
|
||||
style: "font-size: 12px; color: var(--text-secondary);",
|
||||
{issue.created_at.format("%Y-%m-%d %H:%M").to_string()}
|
||||
}
|
||||
td {
|
||||
a {
|
||||
href: "{issue.external_url}",
|
||||
target: "_blank",
|
||||
style: "color: var(--accent); text-decoration: none;",
|
||||
"Open"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Pagination {
|
||||
current_page: page(),
|
||||
total_pages: total_pages,
|
||||
on_page_change: move |p| page.set(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card", p { "Failed to load issues." } }
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading issues..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
15
compliance-dashboard/src/pages/mod.rs
Normal file
15
compliance-dashboard/src/pages/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
// Dashboard page modules — one module per routed page.
pub mod finding_detail;
pub mod findings;
pub mod issues;
pub mod overview;
pub mod repositories;
pub mod sbom;
pub mod settings;

// Re-export each page component at the `pages` root so the router and
// callers can use `pages::FindingsPage` instead of the full module path.
pub use finding_detail::FindingDetailPage;
pub use findings::FindingsPage;
pub use issues::IssuesPage;
pub use overview::OverviewPage;
pub use repositories::RepositoriesPage;
pub use sbom::SbomPage;
pub use settings::SettingsPage;
|
||||
104
compliance-dashboard/src/pages/overview.rs
Normal file
104
compliance-dashboard/src/pages/overview.rs
Normal file
@@ -0,0 +1,104 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::stat_card::StatCard;
|
||||
|
||||
#[cfg(feature = "server")]
|
||||
use crate::infrastructure::stats::fetch_overview_stats;
|
||||
|
||||
#[component]
|
||||
pub fn OverviewPage() -> Element {
|
||||
let stats = use_resource(move || async move {
|
||||
#[cfg(feature = "server")]
|
||||
{
|
||||
fetch_overview_stats().await.ok()
|
||||
}
|
||||
#[cfg(not(feature = "server"))]
|
||||
{
|
||||
crate::infrastructure::stats::fetch_overview_stats().await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "Overview",
|
||||
description: "Security and compliance scanning dashboard",
|
||||
}
|
||||
|
||||
match &*stats.read() {
|
||||
Some(Some(s)) => rsx! {
|
||||
div { class: "stat-cards",
|
||||
StatCard { label: "Repositories", value: s.total_repositories.to_string() }
|
||||
StatCard { label: "Total Findings", value: s.total_findings.to_string() }
|
||||
StatCard {
|
||||
label: "Critical",
|
||||
value: s.critical_findings.to_string(),
|
||||
color: "var(--danger)",
|
||||
}
|
||||
StatCard {
|
||||
label: "High",
|
||||
value: s.high_findings.to_string(),
|
||||
color: "#f97316",
|
||||
}
|
||||
StatCard {
|
||||
label: "Medium",
|
||||
value: s.medium_findings.to_string(),
|
||||
color: "var(--warning)",
|
||||
}
|
||||
StatCard {
|
||||
label: "Low",
|
||||
value: s.low_findings.to_string(),
|
||||
color: "var(--success)",
|
||||
}
|
||||
StatCard { label: "Dependencies", value: s.total_sbom_entries.to_string() }
|
||||
StatCard { label: "CVE Alerts", value: s.total_cve_alerts.to_string() }
|
||||
StatCard { label: "Tracker Issues", value: s.total_issues.to_string() }
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Severity Distribution" }
|
||||
div {
|
||||
style: "display: flex; gap: 8px; align-items: flex-end; height: 200px; padding: 16px;",
|
||||
SeverityBar { label: "Critical", count: s.critical_findings, max: s.total_findings, color: "var(--danger)" }
|
||||
SeverityBar { label: "High", count: s.high_findings, max: s.total_findings, color: "#f97316" }
|
||||
SeverityBar { label: "Medium", count: s.medium_findings, max: s.total_findings, color: "var(--warning)" }
|
||||
SeverityBar { label: "Low", count: s.low_findings, max: s.total_findings, color: "var(--success)" }
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card",
|
||||
p { style: "color: var(--text-secondary);",
|
||||
"Unable to load stats. Make sure the agent API is running."
|
||||
}
|
||||
}
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading overview..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn SeverityBar(label: String, count: u64, max: u64, color: String) -> Element {
|
||||
let height_pct = if max > 0 { (count as f64 / max as f64) * 100.0 } else { 0.0 };
|
||||
let height = format!("{}%", height_pct.max(2.0));
|
||||
|
||||
rsx! {
|
||||
div {
|
||||
style: "flex: 1; display: flex; flex-direction: column; align-items: center; gap: 4px;",
|
||||
div {
|
||||
style: "font-size: 14px; font-weight: 600;",
|
||||
"{count}"
|
||||
}
|
||||
div {
|
||||
style: "width: 100%; background: {color}; border-radius: 4px 4px 0 0; height: {height}; min-height: 4px; transition: height 0.3s;",
|
||||
}
|
||||
div {
|
||||
style: "font-size: 11px; color: var(--text-secondary);",
|
||||
"{label}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
155
compliance-dashboard/src/pages/repositories.rs
Normal file
155
compliance-dashboard/src/pages/repositories.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::pagination::Pagination;
|
||||
|
||||
#[component]
|
||||
pub fn RepositoriesPage() -> Element {
|
||||
let mut page = use_signal(|| 1u64);
|
||||
let mut show_add_form = use_signal(|| false);
|
||||
let mut name = use_signal(String::new);
|
||||
let mut git_url = use_signal(String::new);
|
||||
let mut branch = use_signal(|| "main".to_string());
|
||||
|
||||
let repos = use_resource(move || {
|
||||
let p = page();
|
||||
async move {
|
||||
crate::infrastructure::repositories::fetch_repositories(p).await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "Repositories",
|
||||
description: "Tracked git repositories",
|
||||
}
|
||||
|
||||
div { style: "margin-bottom: 16px;",
|
||||
button {
|
||||
class: "btn btn-primary",
|
||||
onclick: move |_| show_add_form.toggle(),
|
||||
if show_add_form() { "Cancel" } else { "+ Add Repository" }
|
||||
}
|
||||
}
|
||||
|
||||
if show_add_form() {
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Add Repository" }
|
||||
div { class: "form-group",
|
||||
label { "Name" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "my-project",
|
||||
value: "{name}",
|
||||
oninput: move |e| name.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Git URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "https://github.com/org/repo.git",
|
||||
value: "{git_url}",
|
||||
oninput: move |e| git_url.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Default Branch" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "main",
|
||||
value: "{branch}",
|
||||
oninput: move |e| branch.set(e.value()),
|
||||
}
|
||||
}
|
||||
button {
|
||||
class: "btn btn-primary",
|
||||
onclick: move |_| {
|
||||
let n = name();
|
||||
let u = git_url();
|
||||
let b = branch();
|
||||
spawn(async move {
|
||||
let _ = crate::infrastructure::repositories::add_repository(n, u, b).await;
|
||||
});
|
||||
show_add_form.set(false);
|
||||
name.set(String::new());
|
||||
git_url.set(String::new());
|
||||
},
|
||||
"Add"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match &*repos.read() {
|
||||
Some(Some(resp)) => {
|
||||
let total_pages = resp.total.unwrap_or(0).div_ceil(20).max(1);
|
||||
rsx! {
|
||||
div { class: "card",
|
||||
div { class: "table-wrapper",
|
||||
table {
|
||||
thead {
|
||||
tr {
|
||||
th { "Name" }
|
||||
th { "Git URL" }
|
||||
th { "Branch" }
|
||||
th { "Findings" }
|
||||
th { "Last Scanned" }
|
||||
th { "Actions" }
|
||||
}
|
||||
}
|
||||
tbody {
|
||||
for repo in &resp.data {
|
||||
{
|
||||
let repo_id = repo.id.as_ref().map(|id| id.to_hex()).unwrap_or_default();
|
||||
let repo_id_clone = repo_id.clone();
|
||||
rsx! {
|
||||
tr {
|
||||
td { "{repo.name}" }
|
||||
td {
|
||||
style: "font-size: 12px; font-family: monospace;",
|
||||
"{repo.git_url}"
|
||||
}
|
||||
td { "{repo.default_branch}" }
|
||||
td { "{repo.findings_count}" }
|
||||
td {
|
||||
match &repo.last_scanned_commit {
|
||||
Some(sha) => rsx! { span { style: "font-family: monospace; font-size: 12px;", "{&sha[..7.min(sha.len())]}" } },
|
||||
None => rsx! { span { style: "color: var(--text-secondary);", "Never" } },
|
||||
}
|
||||
}
|
||||
td {
|
||||
button {
|
||||
class: "btn btn-ghost",
|
||||
onclick: move |_| {
|
||||
let id = repo_id_clone.clone();
|
||||
spawn(async move {
|
||||
let _ = crate::infrastructure::repositories::trigger_repo_scan(id).await;
|
||||
});
|
||||
},
|
||||
"Scan"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Pagination {
|
||||
current_page: page(),
|
||||
total_pages: total_pages,
|
||||
on_page_change: move |p| page.set(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card", p { "Failed to load repositories." } }
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading repositories..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
85
compliance-dashboard/src/pages/sbom.rs
Normal file
85
compliance-dashboard/src/pages/sbom.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
use crate::components::pagination::Pagination;
|
||||
|
||||
#[component]
|
||||
pub fn SbomPage() -> Element {
|
||||
let mut page = use_signal(|| 1u64);
|
||||
|
||||
let sbom = use_resource(move || {
|
||||
let p = page();
|
||||
async move {
|
||||
crate::infrastructure::sbom::fetch_sbom(p).await.ok()
|
||||
}
|
||||
});
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "SBOM",
|
||||
description: "Software Bill of Materials - dependency inventory across all repositories",
|
||||
}
|
||||
|
||||
match &*sbom.read() {
|
||||
Some(Some(resp)) => {
|
||||
let total_pages = resp.total.unwrap_or(0).div_ceil(50).max(1);
|
||||
rsx! {
|
||||
div { class: "card",
|
||||
div { class: "table-wrapper",
|
||||
table {
|
||||
thead {
|
||||
tr {
|
||||
th { "Package" }
|
||||
th { "Version" }
|
||||
th { "Manager" }
|
||||
th { "License" }
|
||||
th { "Vulnerabilities" }
|
||||
}
|
||||
}
|
||||
tbody {
|
||||
for entry in &resp.data {
|
||||
tr {
|
||||
td {
|
||||
style: "font-weight: 500;",
|
||||
"{entry.name}"
|
||||
}
|
||||
td {
|
||||
style: "font-family: monospace; font-size: 13px;",
|
||||
"{entry.version}"
|
||||
}
|
||||
td { "{entry.package_manager}" }
|
||||
td { "{entry.license.as_deref().unwrap_or(\"-\")}" }
|
||||
td {
|
||||
if entry.known_vulnerabilities.is_empty() {
|
||||
span {
|
||||
style: "color: var(--success);",
|
||||
"None"
|
||||
}
|
||||
} else {
|
||||
span { class: "badge badge-high",
|
||||
"{entry.known_vulnerabilities.len()} vuln(s)"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Pagination {
|
||||
current_page: page(),
|
||||
total_pages: total_pages,
|
||||
on_page_change: move |p| page.set(p),
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Some(None) => rsx! {
|
||||
div { class: "card", p { "Failed to load SBOM." } }
|
||||
},
|
||||
None => rsx! {
|
||||
div { class: "loading", "Loading SBOM..." }
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
142
compliance-dashboard/src/pages/settings.rs
Normal file
142
compliance-dashboard/src/pages/settings.rs
Normal file
@@ -0,0 +1,142 @@
|
||||
use dioxus::prelude::*;
|
||||
|
||||
use crate::components::page_header::PageHeader;
|
||||
|
||||
#[component]
|
||||
pub fn SettingsPage() -> Element {
|
||||
let mut litellm_url = use_signal(|| "http://localhost:4000".to_string());
|
||||
let mut litellm_model = use_signal(|| "gpt-4o".to_string());
|
||||
let mut github_token = use_signal(String::new);
|
||||
let mut gitlab_url = use_signal(|| "https://gitlab.com".to_string());
|
||||
let mut gitlab_token = use_signal(String::new);
|
||||
let mut jira_url = use_signal(String::new);
|
||||
let mut jira_email = use_signal(String::new);
|
||||
let mut jira_token = use_signal(String::new);
|
||||
let mut jira_project = use_signal(String::new);
|
||||
let mut searxng_url = use_signal(|| "http://localhost:8888".to_string());
|
||||
|
||||
rsx! {
|
||||
PageHeader {
|
||||
title: "Settings",
|
||||
description: "Configure integrations and scanning parameters",
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "LiteLLM Configuration" }
|
||||
div { class: "form-group",
|
||||
label { "LiteLLM URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
value: "{litellm_url}",
|
||||
oninput: move |e| litellm_url.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Model" }
|
||||
input {
|
||||
r#type: "text",
|
||||
value: "{litellm_model}",
|
||||
oninput: move |e| litellm_model.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "GitHub Integration" }
|
||||
div { class: "form-group",
|
||||
label { "Personal Access Token" }
|
||||
input {
|
||||
r#type: "password",
|
||||
placeholder: "ghp_...",
|
||||
value: "{github_token}",
|
||||
oninput: move |e| github_token.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "GitLab Integration" }
|
||||
div { class: "form-group",
|
||||
label { "GitLab URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
value: "{gitlab_url}",
|
||||
oninput: move |e| gitlab_url.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Access Token" }
|
||||
input {
|
||||
r#type: "password",
|
||||
placeholder: "glpat-...",
|
||||
value: "{gitlab_token}",
|
||||
oninput: move |e| gitlab_token.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "Jira Integration" }
|
||||
div { class: "form-group",
|
||||
label { "Jira URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "https://your-org.atlassian.net",
|
||||
value: "{jira_url}",
|
||||
oninput: move |e| jira_url.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Email" }
|
||||
input {
|
||||
r#type: "email",
|
||||
value: "{jira_email}",
|
||||
oninput: move |e| jira_email.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "API Token" }
|
||||
input {
|
||||
r#type: "password",
|
||||
value: "{jira_token}",
|
||||
oninput: move |e| jira_token.set(e.value()),
|
||||
}
|
||||
}
|
||||
div { class: "form-group",
|
||||
label { "Project Key" }
|
||||
input {
|
||||
r#type: "text",
|
||||
placeholder: "SEC",
|
||||
value: "{jira_project}",
|
||||
oninput: move |e| jira_project.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { class: "card",
|
||||
div { class: "card-header", "SearXNG" }
|
||||
div { class: "form-group",
|
||||
label { "SearXNG URL" }
|
||||
input {
|
||||
r#type: "text",
|
||||
value: "{searxng_url}",
|
||||
oninput: move |e| searxng_url.set(e.value()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
div { style: "margin-top: 16px;",
|
||||
button {
|
||||
class: "btn btn-primary",
|
||||
onclick: move |_| {
|
||||
tracing::info!("Settings save not yet implemented - settings are managed via .env");
|
||||
},
|
||||
"Save Settings"
|
||||
}
|
||||
p {
|
||||
style: "margin-top: 8px; font-size: 12px; color: var(--text-secondary);",
|
||||
"Note: Settings are currently configured via environment variables (.env file). Dashboard-based settings persistence coming soon."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
45
docker-compose.yml
Normal file
45
docker-compose.yml
Normal file
@@ -0,0 +1,45 @@
|
||||
# Local development stack for the compliance scanner:
# MongoDB (persistence), SearXNG (metasearch), the scanning agent,
# and the dashboard UI.
services:
  # Document store backing the agent and dashboard.
  mongo:
    # NOTE(review): unpinned "latest" tag — consider pinning a major
    # version for reproducible environments.
    image: mongo:latest
    ports:
      - "27017:27017"
    environment:
      # Dev-only credentials; they match the MONGODB_URI in .env.example.
      MONGO_INITDB_ROOT_USERNAME: root
      MONGO_INITDB_ROOT_PASSWORD: example
    volumes:
      - mongo_data:/data/db

  # Self-hosted metasearch engine (SEARXNG_URL in .env points here).
  searxng:
    image: searxng/searxng:latest
    ports:
      # Host 8888 maps to the container's internal 8080.
      - "8888:8080"
    environment:
      - SEARXNG_BASE_URL=http://localhost:8888

  # Scanning agent. 3001 is AGENT_PORT per .env.example; 3002's role is
  # not visible here — confirm against the agent's configuration.
  agent:
    build:
      context: .
      dockerfile: Dockerfile.agent
    ports:
      - "3001:3001"
      - "3002:3002"
    env_file: .env
    depends_on:
      - mongo
    volumes:
      # Working directory for cloned repositories (GIT_CLONE_BASE_PATH).
      - repos_data:/tmp/compliance-scanner/repos

  # Dashboard UI (DASHBOARD_PORT per .env.example).
  dashboard:
    build:
      context: .
      dockerfile: Dockerfile.dashboard
    ports:
      - "8080:8080"
    env_file: .env
    depends_on:
      - mongo
      - agent

# Named volumes so data survives container recreation.
volumes:
  mongo_data:
  repos_data:
|
||||
1
styles/input.css
Normal file
1
styles/input.css
Normal file
@@ -0,0 +1 @@
|
||||
/* Tailwind CSS entry point: imports the full framework for the build. */
@import "tailwindcss";
|
||||
Reference in New Issue
Block a user