Initial commit: breakpilot-compliance - Compliance SDK Platform

Services: Admin-Compliance, Backend-Compliance,
AI-Compliance-SDK, Consent-SDK, Developer-Portal,
PCA-Platform, DSMS

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Boenisch
2026-02-11 23:47:28 +01:00
commit 4435e7ea0a
734 changed files with 251369 additions and 0 deletions

View File

@@ -0,0 +1,308 @@
package roadmap
import (
"time"
"github.com/google/uuid"
)
// ============================================================================
// Constants / Enums
// ============================================================================

// ItemStatus represents the implementation status of a roadmap item.
// Values are persisted as uppercase strings (see the store's status column).
type ItemStatus string

const (
	ItemStatusPlanned    ItemStatus = "PLANNED"
	ItemStatusInProgress ItemStatus = "IN_PROGRESS"
	ItemStatusBlocked    ItemStatus = "BLOCKED"
	ItemStatusCompleted  ItemStatus = "COMPLETED"
	ItemStatusDeferred   ItemStatus = "DEFERRED"
)

// ItemPriority represents the priority of a roadmap item.
type ItemPriority string

const (
	ItemPriorityCritical ItemPriority = "CRITICAL"
	ItemPriorityHigh     ItemPriority = "HIGH"
	ItemPriorityMedium   ItemPriority = "MEDIUM"
	ItemPriorityLow      ItemPriority = "LOW"
)

// ItemCategory represents the category of a roadmap item
// (technical / organizational / processual / documentation / training).
type ItemCategory string

const (
	ItemCategoryTechnical      ItemCategory = "TECHNICAL"
	ItemCategoryOrganizational ItemCategory = "ORGANIZATIONAL"
	ItemCategoryProcessual     ItemCategory = "PROCESSUAL"
	ItemCategoryDocumentation  ItemCategory = "DOCUMENTATION"
	ItemCategoryTraining       ItemCategory = "TRAINING"
)

// ImportFormat represents supported import file formats.
type ImportFormat string

const (
	ImportFormatExcel ImportFormat = "EXCEL"
	ImportFormatCSV   ImportFormat = "CSV"
	ImportFormatJSON  ImportFormat = "JSON"
)
// ============================================================================
// Main Entities
// ============================================================================

// Roadmap represents a compliance implementation roadmap.
// TotalItems, CompletedItems and Progress are denormalized counters stored
// on the roadmap row alongside the items themselves.
type Roadmap struct {
	ID          uuid.UUID  `json:"id"`
	TenantID    uuid.UUID  `json:"tenant_id"`
	NamespaceID *uuid.UUID `json:"namespace_id,omitempty"`
	Title       string     `json:"title"`
	Description string     `json:"description,omitempty"`
	Version     string     `json:"version"` // defaults to "1.0" on create

	// Linked entities
	AssessmentID *uuid.UUID `json:"assessment_id,omitempty"` // Link to UCCA assessment
	PortfolioID  *uuid.UUID `json:"portfolio_id,omitempty"`  // Link to use case portfolio

	// Status tracking
	Status         string `json:"status"` // "draft", "active", "completed", "archived"
	TotalItems     int    `json:"total_items"`
	CompletedItems int    `json:"completed_items"`
	Progress       int    `json:"progress"` // Percentage 0-100

	// Dates
	StartDate  *time.Time `json:"start_date,omitempty"`
	TargetDate *time.Time `json:"target_date,omitempty"`

	// Audit
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
	CreatedBy uuid.UUID `json:"created_by"`
}
// RoadmapItem represents a single item in the compliance roadmap.
// Optional numeric/date fields use pointers so absence is distinguishable
// from the zero value.
type RoadmapItem struct {
	ID        uuid.UUID `json:"id"`
	RoadmapID uuid.UUID `json:"roadmap_id"`

	// Core fields
	Title       string       `json:"title"`
	Description string       `json:"description,omitempty"`
	Category    ItemCategory `json:"category"`
	Priority    ItemPriority `json:"priority"`
	Status      ItemStatus   `json:"status"`

	// Compliance mapping
	ControlID     string `json:"control_id,omitempty"`     // e.g., "CTRL-AVV"
	RegulationRef string `json:"regulation_ref,omitempty"` // e.g., "DSGVO Art. 28"
	GapID         string `json:"gap_id,omitempty"`         // e.g., "GAP_AVV_MISSING"

	// Effort estimation
	EffortDays    *int `json:"effort_days,omitempty"`
	EffortHours   *int `json:"effort_hours,omitempty"`
	EstimatedCost *int `json:"estimated_cost,omitempty"` // EUR

	// Assignment
	AssigneeID   *uuid.UUID `json:"assignee_id,omitempty"`
	AssigneeName string     `json:"assignee_name,omitempty"`
	Department   string     `json:"department,omitempty"`

	// Timeline
	PlannedStart *time.Time `json:"planned_start,omitempty"`
	PlannedEnd   *time.Time `json:"planned_end,omitempty"`
	ActualStart  *time.Time `json:"actual_start,omitempty"`
	ActualEnd    *time.Time `json:"actual_end,omitempty"`

	// Dependencies (persisted as JSON arrays by the store)
	DependsOn []uuid.UUID `json:"depends_on,omitempty"` // IDs of items this depends on
	BlockedBy []uuid.UUID `json:"blocked_by,omitempty"` // IDs of blocking items

	// Evidence
	EvidenceRequired []string `json:"evidence_required,omitempty"`
	EvidenceProvided []string `json:"evidence_provided,omitempty"`

	// Notes
	Notes     string `json:"notes,omitempty"`
	RiskNotes string `json:"risk_notes,omitempty"`

	// Import metadata (only set for items created via file import)
	SourceRow  int    `json:"source_row,omitempty"`  // Row number from import file
	SourceFile string `json:"source_file,omitempty"` // Original filename

	// Ordering
	SortOrder int `json:"sort_order"`

	// Audit
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
// ============================================================================
// Import/Export Structures
// ============================================================================

// ImportJob represents an import job: one uploaded file moving through the
// parse -> validate -> confirm pipeline.
type ImportJob struct {
	ID        uuid.UUID  `json:"id"`
	TenantID  uuid.UUID  `json:"tenant_id"`
	RoadmapID *uuid.UUID `json:"roadmap_id,omitempty"` // Target roadmap (nil = create new)

	// File info
	Filename    string       `json:"filename"`
	Format      ImportFormat `json:"format"`
	FileSize    int64        `json:"file_size"`
	ContentType string       `json:"content_type"`

	// Status
	Status       string `json:"status"` // "pending", "parsing", "validating", "completed", "failed"
	ErrorMessage string `json:"error_message,omitempty"`

	// Parsing results
	TotalRows     int `json:"total_rows"`
	ValidRows     int `json:"valid_rows"`
	InvalidRows   int `json:"invalid_rows"`
	ImportedItems int `json:"imported_items"`

	// Parsed items (held before the user confirms the import)
	ParsedItems []ParsedItem `json:"parsed_items,omitempty"`

	// Audit
	CreatedAt   time.Time  `json:"created_at"`
	UpdatedAt   time.Time  `json:"updated_at"`
	CompletedAt *time.Time `json:"completed_at,omitempty"`
	CreatedBy   uuid.UUID  `json:"created_by"`
}
// ParsedItem represents a single parsed item from an import file, together
// with its validation outcome and any auto-detected catalog mappings.
type ParsedItem struct {
	RowNumber int      `json:"row_number"`
	IsValid   bool     `json:"is_valid"`           // false when required data (e.g. title) is missing
	Errors    []string `json:"errors,omitempty"`   // blocking problems
	Warnings  []string `json:"warnings,omitempty"` // non-blocking problems (unknown enum values, unmatched refs)

	// Extracted data
	Data RoadmapItemInput `json:"data"`

	// Auto-mapping results (filled by ValidateAndEnrich)
	MatchedControl    string  `json:"matched_control,omitempty"`
	MatchedRegulation string  `json:"matched_regulation,omitempty"`
	MatchedGap        string  `json:"matched_gap,omitempty"`
	MatchConfidence   float64 `json:"match_confidence,omitempty"` // 0.0 - 1.0
}
// RoadmapItemInput represents input for creating/updating a roadmap item.
// It is a subset of RoadmapItem restricted to user-suppliable fields.
type RoadmapItemInput struct {
	Title         string       `json:"title"`
	Description   string       `json:"description,omitempty"`
	Category      ItemCategory `json:"category,omitempty"`
	Priority      ItemPriority `json:"priority,omitempty"`
	Status        ItemStatus   `json:"status,omitempty"`
	ControlID     string       `json:"control_id,omitempty"`
	RegulationRef string       `json:"regulation_ref,omitempty"`
	GapID         string       `json:"gap_id,omitempty"`
	EffortDays    *int         `json:"effort_days,omitempty"`
	AssigneeName  string       `json:"assignee_name,omitempty"`
	Department    string       `json:"department,omitempty"`
	PlannedStart  *time.Time   `json:"planned_start,omitempty"`
	PlannedEnd    *time.Time   `json:"planned_end,omitempty"`
	Notes         string       `json:"notes,omitempty"`
}
// ============================================================================
// API Request/Response Types
// ============================================================================

// CreateRoadmapRequest is the API request for creating a roadmap.
type CreateRoadmapRequest struct {
	Title        string     `json:"title"`
	Description  string     `json:"description,omitempty"`
	AssessmentID *uuid.UUID `json:"assessment_id,omitempty"`
	PortfolioID  *uuid.UUID `json:"portfolio_id,omitempty"`
	StartDate    *time.Time `json:"start_date,omitempty"`
	TargetDate   *time.Time `json:"target_date,omitempty"`
}

// CreateRoadmapResponse is the API response for creating a roadmap.
type CreateRoadmapResponse struct {
	Roadmap Roadmap `json:"roadmap"`
}

// ImportUploadResponse is returned after uploading a file for import.
type ImportUploadResponse struct {
	JobID    uuid.UUID `json:"job_id"`
	Filename string    `json:"filename"`
	Format   string    `json:"format"`
	Status   string    `json:"status"`
	Message  string    `json:"message"`
}
// ImportParseResponse is returned after parsing the uploaded file, giving the
// caller a preview of all rows before the import is confirmed.
type ImportParseResponse struct {
	JobID       uuid.UUID         `json:"job_id"`
	Status      string            `json:"status"`
	TotalRows   int               `json:"total_rows"`
	ValidRows   int               `json:"valid_rows"`
	InvalidRows int               `json:"invalid_rows"`
	Items       []ParsedItem      `json:"items"`
	ColumnMap   map[string]string `json:"column_map"` // Detected column mappings
}

// ImportConfirmRequest is the request to confirm and execute an import.
type ImportConfirmRequest struct {
	JobID         uuid.UUID  `json:"job_id"`
	RoadmapID     *uuid.UUID `json:"roadmap_id,omitempty"`    // Target roadmap (nil = create new)
	RoadmapTitle  string     `json:"roadmap_title,omitempty"` // If creating new
	SelectedRows  []int      `json:"selected_rows,omitempty"` // Specific rows to import (nil = all valid)
	ApplyMappings bool       `json:"apply_mappings"`          // Apply auto-detected control/regulation mappings
}

// ImportConfirmResponse is returned after confirming an import.
type ImportConfirmResponse struct {
	RoadmapID     uuid.UUID `json:"roadmap_id"`
	ImportedItems int       `json:"imported_items"`
	SkippedItems  int       `json:"skipped_items"`
	Message       string    `json:"message"`
}
// RoadmapFilters defines filters for listing roadmaps. Zero values (empty
// string / nil / 0) mean "do not filter on this field".
type RoadmapFilters struct {
	Status       string
	AssessmentID *uuid.UUID
	PortfolioID  *uuid.UUID
	Limit        int
	Offset       int
}

// RoadmapItemFilters defines filters for listing roadmap items. Zero values
// mean "do not filter"; SearchQuery matches title or description.
type RoadmapItemFilters struct {
	Status      ItemStatus
	Priority    ItemPriority
	Category    ItemCategory
	AssigneeID  *uuid.UUID
	ControlID   string
	SearchQuery string
	Limit       int
	Offset      int
}

// RoadmapStats contains aggregate statistics for a single roadmap.
type RoadmapStats struct {
	TotalItems      int            `json:"total_items"`
	ByStatus        map[string]int `json:"by_status"`
	ByPriority      map[string]int `json:"by_priority"`
	ByCategory      map[string]int `json:"by_category"`
	ByDepartment    map[string]int `json:"by_department"`
	OverdueItems    int            `json:"overdue_items"`
	UpcomingItems   int            `json:"upcoming_items"` // Due in next 7 days
	TotalEffortDays int            `json:"total_effort_days"`
	Progress        int            `json:"progress"`
}

View File

@@ -0,0 +1,540 @@
package roadmap
import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"fmt"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/xuri/excelize/v2"
)
// Parser handles file parsing for roadmap imports. It carries no state, so a
// single instance can be shared freely across goroutines.
type Parser struct{}

// NewParser returns a ready-to-use Parser.
func NewParser() *Parser {
	return new(Parser)
}
// ColumnMapping defines expected column names and their accepted variations.
// Keys are canonical field names of RoadmapItemInput; values list lowercase
// German and English header spellings that should map to that field
// (matching is done case-insensitively in detectColumns).
var ColumnMapping = map[string][]string{
	"title":          {"title", "titel", "name", "bezeichnung", "massnahme", "maßnahme", "aufgabe", "task"},
	"description":    {"description", "beschreibung", "details", "inhalt", "content"},
	"category":       {"category", "kategorie", "bereich", "type", "typ"},
	"priority":       {"priority", "priorität", "prioritaet", "prio", "dringlichkeit"},
	"status":         {"status", "stand", "zustand"},
	"control_id":     {"control_id", "control", "kontrolle", "massnahme_id", "ctrl"},
	"regulation_ref": {"regulation", "regulation_ref", "verordnung", "gesetz", "artikel", "article", "gdpr_ref"},
	"gap_id":         {"gap_id", "gap", "luecke", "lücke"},
	"effort_days":    {"effort_days", "effort", "aufwand", "tage", "days", "pt", "personentage"},
	"assignee":       {"assignee", "verantwortlich", "zustaendig", "zuständig", "owner", "responsible"},
	"department":     {"department", "abteilung", "bereich", "team"},
	"planned_start":  {"planned_start", "start", "beginn", "startdatum", "start_date"},
	"planned_end":    {"planned_end", "end", "ende", "enddatum", "end_date", "deadline", "frist"},
	"notes":          {"notes", "notizen", "bemerkungen", "kommentar", "comment", "anmerkungen"},
}
// DetectedColumn represents a detected column mapping: which input column
// (by index and raw header text) maps to which canonical field, and with
// what confidence.
type DetectedColumn struct {
	Index      int     `json:"index"`
	Header     string  `json:"header"`
	MappedTo   string  `json:"mapped_to"`  // canonical field name ("" = unmapped)
	Confidence float64 `json:"confidence"` // 1.0 exact header match, 0.8 substring match, 0 unmapped
}

// ParseResult contains the result of parsing an import file: the detected
// format, column mappings, per-row parsed items, and row counts.
type ParseResult struct {
	Format      ImportFormat     `json:"format"`
	TotalRows   int              `json:"total_rows"`
	ValidRows   int              `json:"valid_rows"`
	InvalidRows int              `json:"invalid_rows"`
	Columns     []DetectedColumn `json:"columns"`
	Items       []ParsedItem     `json:"items"`
	Errors      []string         `json:"errors"`
}
// ParseFile determines the file format from the filename and content type,
// then dispatches to the format-specific parser. Returns an error for
// unrecognized formats.
func (p *Parser) ParseFile(data []byte, filename string, contentType string) (*ParseResult, error) {
	switch p.detectFormat(filename, contentType) {
	case ImportFormatExcel:
		return p.parseExcel(data)
	case ImportFormatCSV:
		return p.parseCSV(data)
	case ImportFormatJSON:
		return p.parseJSON(data)
	}
	return nil, fmt.Errorf("unsupported file format: %s", filename)
}
// detectFormat determines the import format, preferring the file extension
// and falling back to the MIME content type. Returns "" when neither is
// recognized.
func (p *Parser) detectFormat(filename string, contentType string) ImportFormat {
	lower := strings.ToLower(filename)
	switch {
	case strings.HasSuffix(lower, ".xlsx"), strings.HasSuffix(lower, ".xls"):
		return ImportFormatExcel
	case strings.HasSuffix(lower, ".csv"):
		return ImportFormatCSV
	case strings.HasSuffix(lower, ".json"):
		return ImportFormatJSON
	}
	// Extension was inconclusive; fall back to the declared content type.
	switch contentType {
	case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
		"application/vnd.ms-excel":
		return ImportFormatExcel
	case "text/csv":
		return ImportFormatCSV
	case "application/json":
		return ImportFormatJSON
	}
	return ""
}
// parseExcel parses an Excel workbook. Only the first sheet is read; the
// first row is treated as the header and used for column detection, and all
// remaining rows become ParsedItems.
func (p *Parser) parseExcel(data []byte) (*ParseResult, error) {
	result := &ParseResult{
		Format: ImportFormatExcel,
	}
	f, err := excelize.OpenReader(bytes.NewReader(data))
	if err != nil {
		return nil, fmt.Errorf("failed to open Excel file: %w", err)
	}
	defer f.Close()
	// Get the first sheet
	sheets := f.GetSheetList()
	if len(sheets) == 0 {
		return nil, fmt.Errorf("no sheets found in Excel file")
	}
	rows, err := f.GetRows(sheets[0])
	if err != nil {
		return nil, fmt.Errorf("failed to read rows: %w", err)
	}
	if len(rows) < 2 {
		return nil, fmt.Errorf("file must have at least a header row and one data row")
	}
	// Detect column mappings from header
	headers := rows[0]
	result.Columns = p.detectColumns(headers)
	// Parse data rows; invalid rows are recorded (with errors) rather than
	// aborting the whole import.
	for i, row := range rows[1:] {
		rowNum := i + 2 // 1-based, skip header
		item := p.parseRow(row, result.Columns, rowNum)
		result.Items = append(result.Items, item)
		result.TotalRows++
		if item.IsValid {
			result.ValidRows++
		} else {
			result.InvalidRows++
		}
	}
	return result, nil
}
// parseCSV parses a CSV file. The delimiter is unknown up front, so parsing
// is attempted with ',', ';' and '\t' in turn; the first attempt that yields
// at least one row with more than one column wins. If no attempt produces a
// multi-column result, the records from the last error-free attempt are used.
//
// Fix: the original created an initial reader (with TrimLeadingSpace set)
// that was immediately discarded, while the per-delimiter retry readers
// omitted TrimLeadingSpace — so leading spaces were never actually trimmed.
// Each attempt now gets a fresh, consistently configured reader.
func (p *Parser) parseCSV(data []byte) (*ParseResult, error) {
	result := &ParseResult{
		Format: ImportFormatCSV,
	}
	// Try different delimiters; csv.Reader cannot be rewound, so each attempt
	// needs a new reader over a new bytes.Reader.
	delimiters := []rune{',', ';', '\t'}
	var records [][]string
	var err error
	for _, delim := range delimiters {
		reader := csv.NewReader(bytes.NewReader(data))
		reader.Comma = delim
		reader.LazyQuotes = true
		reader.TrimLeadingSpace = true
		records, err = reader.ReadAll()
		if err == nil && len(records) > 0 && len(records[0]) > 1 {
			break
		}
	}
	if err != nil {
		return nil, fmt.Errorf("failed to parse CSV: %w", err)
	}
	if len(records) < 2 {
		return nil, fmt.Errorf("file must have at least a header row and one data row")
	}
	// Detect column mappings from header
	headers := records[0]
	result.Columns = p.detectColumns(headers)
	// Parse data rows; invalid rows are recorded rather than aborting.
	for i, row := range records[1:] {
		rowNum := i + 2 // 1-based row number, accounting for the header row
		item := p.parseRow(row, result.Columns, rowNum)
		result.Items = append(result.Items, item)
		result.TotalRows++
		if item.IsValid {
			result.ValidRows++
		} else {
			result.InvalidRows++
		}
	}
	return result, nil
}
// parseJSON parses a JSON file. It accepts either a top-level array of
// objects or an object wrapping the array under an "items" key.
//
// Fix: headers were previously collected by ranging over a map, making the
// detected column order (and thus the reported ColumnMap / Columns) change
// from run to run. Headers are now sorted for deterministic output; row
// values are still looked up by key, so parsing results are unaffected.
func (p *Parser) parseJSON(data []byte) (*ParseResult, error) {
	result := &ParseResult{
		Format: ImportFormatJSON,
	}
	// Try parsing as array of items
	var items []map[string]interface{}
	if err := json.Unmarshal(data, &items); err != nil {
		// Fall back to an object with an "items" array: {"items": [...]}
		var wrapper struct {
			Items []map[string]interface{} `json:"items"`
		}
		if err := json.Unmarshal(data, &wrapper); err != nil {
			return nil, fmt.Errorf("failed to parse JSON: %w", err)
		}
		items = wrapper.Items
	}
	if len(items) == 0 {
		return nil, fmt.Errorf("no items found in JSON file")
	}
	// Derive "headers" from the keys of the first item; keys present only in
	// later items are ignored. Sort for deterministic column detection.
	headers := make([]string, 0, len(items[0]))
	for key := range items[0] {
		headers = append(headers, key)
	}
	sort.Strings(headers)
	result.Columns = p.detectColumns(headers)
	// Convert each object into a positional row matching the detected columns.
	for i, itemMap := range items {
		rowNum := i + 1
		row := make([]string, len(result.Columns))
		for j, col := range result.Columns {
			if val, ok := itemMap[col.Header]; ok {
				row[j] = fmt.Sprintf("%v", val)
			}
		}
		item := p.parseRow(row, result.Columns, rowNum)
		result.Items = append(result.Items, item)
		result.TotalRows++
		if item.IsValid {
			result.ValidRows++
		} else {
			result.InvalidRows++
		}
	}
	return result, nil
}
// detectColumns maps each header to a canonical field via ColumnMapping.
// An exact (case-insensitive, trimmed) match scores confidence 1.0; a
// substring match scores 0.8. A later exact match overrides an earlier
// substring match, but one 0.8 match never displaces another.
// NOTE(review): ColumnMapping is iterated in Go's random map order, so when
// a header substring-matches variations of two different fields, which field
// wins is nondeterministic — confirm whether that matters for callers.
func (p *Parser) detectColumns(headers []string) []DetectedColumn {
	columns := make([]DetectedColumn, len(headers))
	for i, header := range headers {
		columns[i] = DetectedColumn{
			Index:      i,
			Header:     header,
			Confidence: 0,
		}
		headerLower := strings.ToLower(strings.TrimSpace(header))
		// Try to match against known column names
		for fieldName, variations := range ColumnMapping {
			for _, variation := range variations {
				if headerLower == variation || strings.Contains(headerLower, variation) {
					if headerLower == variation {
						columns[i].MappedTo = fieldName
						columns[i].Confidence = 1.0
					} else if columns[i].Confidence < 0.8 {
						columns[i].MappedTo = fieldName
						columns[i].Confidence = 0.8
					}
					// First matching variation decides for this field.
					break
				}
			}
			if columns[i].Confidence >= 1.0 {
				// Exact match found; no other field can do better.
				break
			}
		}
	}
	return columns
}
// parseRow converts one data row into a ParsedItem using the detected column
// mappings. Title is the only required field; a missing title marks the item
// invalid with an error. Unrecognized category/priority/status values produce
// a warning and fall back to Technical/Medium/Planned respectively. A
// non-numeric effort value is silently dropped, as are unparseable dates.
// User-facing messages are in German by design.
func (p *Parser) parseRow(row []string, columns []DetectedColumn, rowNum int) ParsedItem {
	item := ParsedItem{
		RowNumber: rowNum,
		IsValid:   true,
		Data:      RoadmapItemInput{},
	}
	// Build a field-name -> cell-value map; unmapped columns and cells beyond
	// the row's length are skipped.
	values := make(map[string]string)
	for i, col := range columns {
		if i < len(row) && col.MappedTo != "" {
			values[col.MappedTo] = strings.TrimSpace(row[i])
		}
	}
	// Extract title (required)
	if title, ok := values["title"]; ok && title != "" {
		item.Data.Title = title
	} else {
		item.IsValid = false
		item.Errors = append(item.Errors, "Titel/Title ist erforderlich")
	}
	// Extract optional fields
	if desc, ok := values["description"]; ok {
		item.Data.Description = desc
	}
	// Category (warn + default to Technical when unrecognized)
	if cat, ok := values["category"]; ok && cat != "" {
		item.Data.Category = p.parseCategory(cat)
		if item.Data.Category == "" {
			item.Warnings = append(item.Warnings, fmt.Sprintf("Unbekannte Kategorie: %s", cat))
			item.Data.Category = ItemCategoryTechnical
		}
	}
	// Priority (warn + default to Medium when unrecognized)
	if prio, ok := values["priority"]; ok && prio != "" {
		item.Data.Priority = p.parsePriority(prio)
		if item.Data.Priority == "" {
			item.Warnings = append(item.Warnings, fmt.Sprintf("Unbekannte Priorität: %s", prio))
			item.Data.Priority = ItemPriorityMedium
		}
	}
	// Status (warn + default to Planned when unrecognized)
	if status, ok := values["status"]; ok && status != "" {
		item.Data.Status = p.parseStatus(status)
		if item.Data.Status == "" {
			item.Warnings = append(item.Warnings, fmt.Sprintf("Unbekannter Status: %s", status))
			item.Data.Status = ItemStatusPlanned
		}
	}
	// Control ID
	if ctrl, ok := values["control_id"]; ok {
		item.Data.ControlID = ctrl
	}
	// Regulation reference
	if reg, ok := values["regulation_ref"]; ok {
		item.Data.RegulationRef = reg
	}
	// Gap ID
	if gap, ok := values["gap_id"]; ok {
		item.Data.GapID = gap
	}
	// Effort in days; non-integer values are ignored without a warning.
	if effort, ok := values["effort_days"]; ok && effort != "" {
		if days, err := strconv.Atoi(effort); err == nil {
			item.Data.EffortDays = &days
		}
	}
	// Assignee
	if assignee, ok := values["assignee"]; ok {
		item.Data.AssigneeName = assignee
	}
	// Department
	if dept, ok := values["department"]; ok {
		item.Data.Department = dept
	}
	// Dates; unparseable values are ignored without a warning.
	if startStr, ok := values["planned_start"]; ok && startStr != "" {
		if start := p.parseDate(startStr); start != nil {
			item.Data.PlannedStart = start
		}
	}
	if endStr, ok := values["planned_end"]; ok && endStr != "" {
		if end := p.parseDate(endStr); end != nil {
			item.Data.PlannedEnd = end
		}
	}
	// Notes
	if notes, ok := values["notes"]; ok {
		item.Data.Notes = notes
	}
	return item
}
// parseCategory maps a free-form category string to an ItemCategory.
// Matching is case-insensitive and substring-based, covering German and
// English spellings; returns "" when nothing matches.
func (p *Parser) parseCategory(s string) ItemCategory {
	v := strings.ToLower(strings.TrimSpace(s))
	has := func(sub string) bool { return strings.Contains(v, sub) }
	if has("tech") {
		return ItemCategoryTechnical
	}
	if has("org") {
		return ItemCategoryOrganizational
	}
	if has("proz") || has("process") {
		return ItemCategoryProcessual
	}
	if has("dok") || has("doc") {
		return ItemCategoryDocumentation
	}
	if has("train") || has("schul") {
		return ItemCategoryTraining
	}
	return ""
}
// parsePriority maps a free-form priority string to an ItemPriority.
// Accepts German/English substrings or the numeric levels "1".."4"
// (1 = critical … 4 = low); returns "" when nothing matches.
func (p *Parser) parsePriority(s string) ItemPriority {
	v := strings.ToLower(strings.TrimSpace(s))
	has := func(sub string) bool { return strings.Contains(v, sub) }
	if has("crit") || has("krit") || v == "1" {
		return ItemPriorityCritical
	}
	if has("high") || has("hoch") || v == "2" {
		return ItemPriorityHigh
	}
	if has("med") || has("mitt") || v == "3" {
		return ItemPriorityMedium
	}
	if has("low") || has("nied") || v == "4" {
		return ItemPriorityLow
	}
	return ""
}
// parseStatus maps a free-form status string to an ItemStatus. Matching is
// case-insensitive and substring-based over German and English spellings;
// returns "" when nothing matches.
func (p *Parser) parseStatus(s string) ItemStatus {
	v := strings.ToLower(strings.TrimSpace(s))
	has := func(sub string) bool { return strings.Contains(v, sub) }
	if has("plan") || has("offen") || has("open") {
		return ItemStatusPlanned
	}
	if has("progress") || has("lauf") || has("arbeit") {
		return ItemStatusInProgress
	}
	if has("block") || has("wart") {
		return ItemStatusBlocked
	}
	if has("complet") || has("done") || has("fertig") || has("erledigt") {
		return ItemStatusCompleted
	}
	if has("defer") || has("zurück") || has("verschob") {
		return ItemStatusDeferred
	}
	return ""
}
// parseDate tries a list of common date layouts (ISO, German dotted,
// slash-separated, RFC3339) and returns the first successful parse, or nil
// when the input is empty or matches no layout. Layout order matters:
// "02/01/2006" (day-first) is tried before "01/02/2006", so ambiguous
// slash dates are interpreted day-first.
func (p *Parser) parseDate(s string) *time.Time {
	trimmed := strings.TrimSpace(s)
	if trimmed == "" {
		return nil
	}
	layouts := [...]string{
		"2006-01-02",
		"02.01.2006",
		"2.1.2006",
		"02/01/2006",
		"2/1/2006",
		"01/02/2006",
		"1/2/2006",
		"2006/01/02",
		time.RFC3339,
	}
	for _, layout := range layouts {
		t, err := time.Parse(layout, trimmed)
		if err != nil {
			continue
		}
		return &t
	}
	return nil
}
// ValidateAndEnrich cross-checks each item's control/regulation/gap reference
// against the supplied catalogs (case-insensitively) and records matches and
// warnings directly on the items. A matched control sets MatchConfidence to
// 1.0; an unmatched regulation produces no warning. The slice is modified in
// place and returned for convenience.
func (p *Parser) ValidateAndEnrich(items []ParsedItem, controls []string, regulations []string, gaps []string) []ParsedItem {
	// Build case-insensitive lookup sets for the three catalogs.
	lowerSet := func(vals []string) map[string]bool {
		set := make(map[string]bool, len(vals))
		for _, v := range vals {
			set[strings.ToLower(v)] = true
		}
		return set
	}
	controlSet := lowerSet(controls)
	regSet := lowerSet(regulations)
	gapSet := lowerSet(gaps)

	for i := range items {
		item := &items[i]
		// Control ID: match or warn.
		if ctrl := item.Data.ControlID; ctrl != "" {
			if controlSet[strings.ToLower(ctrl)] {
				item.MatchedControl = ctrl
				item.MatchConfidence = 1.0
			} else {
				item.Warnings = append(item.Warnings, fmt.Sprintf("Control '%s' nicht im Katalog gefunden", ctrl))
			}
		}
		// Regulation reference: match only, no warning on miss.
		if reg := item.Data.RegulationRef; reg != "" && regSet[strings.ToLower(reg)] {
			item.MatchedRegulation = reg
		}
		// Gap ID: match or warn.
		if gap := item.Data.GapID; gap != "" {
			if gapSet[strings.ToLower(gap)] {
				item.MatchedGap = gap
			} else {
				item.Warnings = append(item.Warnings, fmt.Sprintf("Gap '%s' nicht im Mapping gefunden", gap))
			}
		}
	}
	return items
}

View File

@@ -0,0 +1,757 @@
package roadmap
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgxpool"
)
// Store handles roadmap data persistence on top of a pgx connection pool.
type Store struct {
	pool *pgxpool.Pool
}

// NewStore creates a new roadmap store backed by the given connection pool.
func NewStore(pool *pgxpool.Pool) *Store {
	store := new(Store)
	store.pool = pool
	return store
}
// ============================================================================
// Roadmap CRUD Operations
// ============================================================================

// CreateRoadmap inserts a new roadmap row. It mutates the passed struct:
// a fresh ID and UTC created/updated timestamps are assigned, and Status
// and Version fall back to "draft" / "1.0" when empty.
func (s *Store) CreateRoadmap(ctx context.Context, r *Roadmap) error {
	r.ID = uuid.New()
	r.CreatedAt = time.Now().UTC()
	r.UpdatedAt = r.CreatedAt
	if r.Status == "" {
		r.Status = "draft"
	}
	if r.Version == "" {
		r.Version = "1.0"
	}
	_, err := s.pool.Exec(ctx, `
		INSERT INTO roadmaps (
			id, tenant_id, namespace_id, title, description, version,
			assessment_id, portfolio_id, status,
			total_items, completed_items, progress,
			start_date, target_date,
			created_at, updated_at, created_by
		) VALUES (
			$1, $2, $3, $4, $5, $6,
			$7, $8, $9,
			$10, $11, $12,
			$13, $14,
			$15, $16, $17
		)
	`,
		// Argument order mirrors the column list above.
		r.ID, r.TenantID, r.NamespaceID, r.Title, r.Description, r.Version,
		r.AssessmentID, r.PortfolioID, r.Status,
		r.TotalItems, r.CompletedItems, r.Progress,
		r.StartDate, r.TargetDate,
		r.CreatedAt, r.UpdatedAt, r.CreatedBy,
	)
	return err
}
// GetRoadmap retrieves a roadmap by ID. Returns (nil, nil) when no roadmap
// with that ID exists, letting callers distinguish "not found" from real
// errors.
func (s *Store) GetRoadmap(ctx context.Context, id uuid.UUID) (*Roadmap, error) {
	var r Roadmap
	err := s.pool.QueryRow(ctx, `
		SELECT
			id, tenant_id, namespace_id, title, description, version,
			assessment_id, portfolio_id, status,
			total_items, completed_items, progress,
			start_date, target_date,
			created_at, updated_at, created_by
		FROM roadmaps WHERE id = $1
	`, id).Scan(
		&r.ID, &r.TenantID, &r.NamespaceID, &r.Title, &r.Description, &r.Version,
		&r.AssessmentID, &r.PortfolioID, &r.Status,
		&r.TotalItems, &r.CompletedItems, &r.Progress,
		&r.StartDate, &r.TargetDate,
		&r.CreatedAt, &r.UpdatedAt, &r.CreatedBy,
	)
	// Fix: compare with errors.Is instead of ==, so a wrapped
	// pgx.ErrNoRows is still recognized as "not found".
	if errors.Is(err, pgx.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	return &r, nil
}
// ListRoadmaps lists roadmaps for a tenant, newest first, with optional
// filters. Filter clauses are added with positional placeholders only —
// values are never interpolated into the SQL string.
//
// Fixes: rows.Err() is now checked after iteration (a mid-stream failure was
// previously returned as a silently truncated result), and Offset is applied
// independently of Limit (it was previously ignored unless Limit > 0).
func (s *Store) ListRoadmaps(ctx context.Context, tenantID uuid.UUID, filters *RoadmapFilters) ([]Roadmap, error) {
	query := `
		SELECT
			id, tenant_id, namespace_id, title, description, version,
			assessment_id, portfolio_id, status,
			total_items, completed_items, progress,
			start_date, target_date,
			created_at, updated_at, created_by
		FROM roadmaps WHERE tenant_id = $1`
	args := []interface{}{tenantID}
	argIdx := 2
	if filters != nil {
		if filters.Status != "" {
			query += fmt.Sprintf(" AND status = $%d", argIdx)
			args = append(args, filters.Status)
			argIdx++
		}
		if filters.AssessmentID != nil {
			query += fmt.Sprintf(" AND assessment_id = $%d", argIdx)
			args = append(args, *filters.AssessmentID)
			argIdx++
		}
		if filters.PortfolioID != nil {
			query += fmt.Sprintf(" AND portfolio_id = $%d", argIdx)
			args = append(args, *filters.PortfolioID)
			argIdx++
		}
	}
	query += " ORDER BY created_at DESC"
	if filters != nil {
		if filters.Limit > 0 {
			query += fmt.Sprintf(" LIMIT $%d", argIdx)
			args = append(args, filters.Limit)
			argIdx++
		}
		if filters.Offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIdx)
			args = append(args, filters.Offset)
		}
	}
	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var roadmaps []Roadmap
	for rows.Next() {
		var r Roadmap
		err := rows.Scan(
			&r.ID, &r.TenantID, &r.NamespaceID, &r.Title, &r.Description, &r.Version,
			&r.AssessmentID, &r.PortfolioID, &r.Status,
			&r.TotalItems, &r.CompletedItems, &r.Progress,
			&r.StartDate, &r.TargetDate,
			&r.CreatedAt, &r.UpdatedAt, &r.CreatedBy,
		)
		if err != nil {
			return nil, err
		}
		roadmaps = append(roadmaps, r)
	}
	// Surface any error that terminated the row stream early.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return roadmaps, nil
}
// UpdateRoadmap updates a roadmap's mutable fields by ID and refreshes
// UpdatedAt (UTC) on the passed struct. TenantID, NamespaceID, CreatedAt and
// CreatedBy are intentionally excluded from the UPDATE. Note that no
// affected-row check is performed, so updating a non-existent ID returns nil.
func (s *Store) UpdateRoadmap(ctx context.Context, r *Roadmap) error {
	r.UpdatedAt = time.Now().UTC()
	_, err := s.pool.Exec(ctx, `
		UPDATE roadmaps SET
			title = $2, description = $3, version = $4,
			assessment_id = $5, portfolio_id = $6, status = $7,
			total_items = $8, completed_items = $9, progress = $10,
			start_date = $11, target_date = $12,
			updated_at = $13
		WHERE id = $1
	`,
		r.ID, r.Title, r.Description, r.Version,
		r.AssessmentID, r.PortfolioID, r.Status,
		r.TotalItems, r.CompletedItems, r.Progress,
		r.StartDate, r.TargetDate,
		r.UpdatedAt,
	)
	return err
}
// DeleteRoadmap deletes a roadmap and all of its items.
//
// Fix: both deletes now run in a single transaction. Previously the items
// were deleted first with separate statements, so a failure of the second
// delete left a roadmap stripped of its items.
func (s *Store) DeleteRoadmap(ctx context.Context, id uuid.UUID) error {
	tx, err := s.pool.Begin(ctx)
	if err != nil {
		return err
	}
	// Rollback is a no-op after a successful Commit; its error is irrelevant here.
	defer tx.Rollback(ctx)
	// Delete items first (they reference the roadmap).
	if _, err := tx.Exec(ctx, "DELETE FROM roadmap_items WHERE roadmap_id = $1", id); err != nil {
		return err
	}
	// Delete roadmap
	if _, err := tx.Exec(ctx, "DELETE FROM roadmaps WHERE id = $1", id); err != nil {
		return err
	}
	return tx.Commit(ctx)
}
// ============================================================================
// RoadmapItem CRUD Operations
// ============================================================================

// CreateItem inserts a new roadmap item. It mutates the passed struct:
// a fresh ID and UTC timestamps are assigned, and empty enum fields fall
// back to Planned / Medium / Technical. Slice fields are serialized to JSON
// for storage.
func (s *Store) CreateItem(ctx context.Context, item *RoadmapItem) error {
	item.ID = uuid.New()
	item.CreatedAt = time.Now().UTC()
	item.UpdatedAt = item.CreatedAt
	if item.Status == "" {
		item.Status = ItemStatusPlanned
	}
	if item.Priority == "" {
		item.Priority = ItemPriorityMedium
	}
	if item.Category == "" {
		item.Category = ItemCategoryTechnical
	}
	// Marshal errors are ignored: slices of UUIDs/strings marshal without error.
	dependsOn, _ := json.Marshal(item.DependsOn)
	blockedBy, _ := json.Marshal(item.BlockedBy)
	evidenceReq, _ := json.Marshal(item.EvidenceRequired)
	evidenceProv, _ := json.Marshal(item.EvidenceProvided)
	_, err := s.pool.Exec(ctx, `
		INSERT INTO roadmap_items (
			id, roadmap_id, title, description, category, priority, status,
			control_id, regulation_ref, gap_id,
			effort_days, effort_hours, estimated_cost,
			assignee_id, assignee_name, department,
			planned_start, planned_end, actual_start, actual_end,
			depends_on, blocked_by,
			evidence_required, evidence_provided,
			notes, risk_notes,
			source_row, source_file, sort_order,
			created_at, updated_at
		) VALUES (
			$1, $2, $3, $4, $5, $6, $7,
			$8, $9, $10,
			$11, $12, $13,
			$14, $15, $16,
			$17, $18, $19, $20,
			$21, $22,
			$23, $24,
			$25, $26,
			$27, $28, $29,
			$30, $31
		)
	`,
		// Argument order mirrors the column list above; enums are stored as
		// plain strings.
		item.ID, item.RoadmapID, item.Title, item.Description, string(item.Category), string(item.Priority), string(item.Status),
		item.ControlID, item.RegulationRef, item.GapID,
		item.EffortDays, item.EffortHours, item.EstimatedCost,
		item.AssigneeID, item.AssigneeName, item.Department,
		item.PlannedStart, item.PlannedEnd, item.ActualStart, item.ActualEnd,
		dependsOn, blockedBy,
		evidenceReq, evidenceProv,
		item.Notes, item.RiskNotes,
		item.SourceRow, item.SourceFile, item.SortOrder,
		item.CreatedAt, item.UpdatedAt,
	)
	return err
}
// GetItem retrieves a roadmap item by ID. Returns (nil, nil) when the item
// does not exist. Enum columns are stored as plain strings and converted
// back to their typed forms; the JSON array columns written by CreateItem
// are unmarshaled into slices.
func (s *Store) GetItem(ctx context.Context, id uuid.UUID) (*RoadmapItem, error) {
	var item RoadmapItem
	var category, priority, status string
	var dependsOn, blockedBy, evidenceReq, evidenceProv []byte
	err := s.pool.QueryRow(ctx, `
		SELECT
			id, roadmap_id, title, description, category, priority, status,
			control_id, regulation_ref, gap_id,
			effort_days, effort_hours, estimated_cost,
			assignee_id, assignee_name, department,
			planned_start, planned_end, actual_start, actual_end,
			depends_on, blocked_by,
			evidence_required, evidence_provided,
			notes, risk_notes,
			source_row, source_file, sort_order,
			created_at, updated_at
		FROM roadmap_items WHERE id = $1
	`, id).Scan(
		&item.ID, &item.RoadmapID, &item.Title, &item.Description, &category, &priority, &status,
		&item.ControlID, &item.RegulationRef, &item.GapID,
		&item.EffortDays, &item.EffortHours, &item.EstimatedCost,
		&item.AssigneeID, &item.AssigneeName, &item.Department,
		&item.PlannedStart, &item.PlannedEnd, &item.ActualStart, &item.ActualEnd,
		&dependsOn, &blockedBy,
		&evidenceReq, &evidenceProv,
		&item.Notes, &item.RiskNotes,
		&item.SourceRow, &item.SourceFile, &item.SortOrder,
		&item.CreatedAt, &item.UpdatedAt,
	)
	// Fix: compare with errors.Is instead of ==, so a wrapped
	// pgx.ErrNoRows is still recognized as "not found".
	if errors.Is(err, pgx.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	item.Category = ItemCategory(category)
	item.Priority = ItemPriority(priority)
	item.Status = ItemStatus(status)
	// These columns are written by CreateItem via json.Marshal; unmarshal
	// errors (e.g. NULL column) simply leave the corresponding slice nil.
	_ = json.Unmarshal(dependsOn, &item.DependsOn)
	_ = json.Unmarshal(blockedBy, &item.BlockedBy)
	_ = json.Unmarshal(evidenceReq, &item.EvidenceRequired)
	_ = json.Unmarshal(evidenceProv, &item.EvidenceProvided)
	return &item, nil
}
// ListItems lists items for a roadmap with optional filters.
//
// filters may be nil, in which case all items of the roadmap are returned.
// Results are ordered by sort_order, then priority, then created_at.
// NOTE(review): "priority ASC" sorts the enum alphabetically
// (CRITICAL < HIGH < LOW < MEDIUM), not by urgency — confirm this is intended.
func (s *Store) ListItems(ctx context.Context, roadmapID uuid.UUID, filters *RoadmapItemFilters) ([]RoadmapItem, error) {
	query := `
		SELECT
			id, roadmap_id, title, description, category, priority, status,
			control_id, regulation_ref, gap_id,
			effort_days, effort_hours, estimated_cost,
			assignee_id, assignee_name, department,
			planned_start, planned_end, actual_start, actual_end,
			depends_on, blocked_by,
			evidence_required, evidence_provided,
			notes, risk_notes,
			source_row, source_file, sort_order,
			created_at, updated_at
		FROM roadmap_items WHERE roadmap_id = $1`
	args := []interface{}{roadmapID}
	argIdx := 2
	if filters != nil {
		if filters.Status != "" {
			query += fmt.Sprintf(" AND status = $%d", argIdx)
			args = append(args, string(filters.Status))
			argIdx++
		}
		if filters.Priority != "" {
			query += fmt.Sprintf(" AND priority = $%d", argIdx)
			args = append(args, string(filters.Priority))
			argIdx++
		}
		if filters.Category != "" {
			query += fmt.Sprintf(" AND category = $%d", argIdx)
			args = append(args, string(filters.Category))
			argIdx++
		}
		if filters.AssigneeID != nil {
			query += fmt.Sprintf(" AND assignee_id = $%d", argIdx)
			args = append(args, *filters.AssigneeID)
			argIdx++
		}
		if filters.ControlID != "" {
			query += fmt.Sprintf(" AND control_id = $%d", argIdx)
			args = append(args, filters.ControlID)
			argIdx++
		}
		if filters.SearchQuery != "" {
			// Same placeholder twice on purpose: one pattern arg matches
			// both title and description.
			query += fmt.Sprintf(" AND (title ILIKE $%d OR description ILIKE $%d)", argIdx, argIdx)
			args = append(args, "%"+filters.SearchQuery+"%")
			argIdx++
		}
	}
	query += " ORDER BY sort_order ASC, priority ASC, created_at ASC"
	if filters != nil && filters.Limit > 0 {
		query += fmt.Sprintf(" LIMIT $%d", argIdx)
		args = append(args, filters.Limit)
		argIdx++
		if filters.Offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIdx)
			args = append(args, filters.Offset)
		}
	}
	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []RoadmapItem
	for rows.Next() {
		var item RoadmapItem
		var category, priority, status string
		var dependsOn, blockedBy, evidenceReq, evidenceProv []byte
		err := rows.Scan(
			&item.ID, &item.RoadmapID, &item.Title, &item.Description, &category, &priority, &status,
			&item.ControlID, &item.RegulationRef, &item.GapID,
			&item.EffortDays, &item.EffortHours, &item.EstimatedCost,
			&item.AssigneeID, &item.AssigneeName, &item.Department,
			&item.PlannedStart, &item.PlannedEnd, &item.ActualStart, &item.ActualEnd,
			&dependsOn, &blockedBy,
			&evidenceReq, &evidenceProv,
			&item.Notes, &item.RiskNotes,
			&item.SourceRow, &item.SourceFile, &item.SortOrder,
			&item.CreatedAt, &item.UpdatedAt,
		)
		if err != nil {
			return nil, err
		}
		item.Category = ItemCategory(category)
		item.Priority = ItemPriority(priority)
		item.Status = ItemStatus(status)
		// decode parses a JSON array column; NULL/empty is absent, anything
		// else must parse (previously Unmarshal errors were ignored).
		decode := func(raw []byte, dst interface{}, col string) error {
			if len(raw) == 0 {
				return nil
			}
			if err := json.Unmarshal(raw, dst); err != nil {
				return fmt.Errorf("decoding %s for item %s: %w", col, item.ID, err)
			}
			return nil
		}
		if err := decode(dependsOn, &item.DependsOn, "depends_on"); err != nil {
			return nil, err
		}
		if err := decode(blockedBy, &item.BlockedBy, "blocked_by"); err != nil {
			return nil, err
		}
		if err := decode(evidenceReq, &item.EvidenceRequired, "evidence_required"); err != nil {
			return nil, err
		}
		if err := decode(evidenceProv, &item.EvidenceProvided, "evidence_provided"); err != nil {
			return nil, err
		}
		items = append(items, item)
	}
	// Without this check an error during iteration (e.g. a dropped
	// connection) would silently return a truncated result set.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
// UpdateItem persists all mutable fields of a roadmap item, keyed by its ID.
// It refreshes item.UpdatedAt to the current UTC time before writing, so the
// caller's struct reflects what was stored.
func (s *Store) UpdateItem(ctx context.Context, item *RoadmapItem) error {
	item.UpdatedAt = time.Now().UTC()
	// Serialize the list-valued fields to JSON for their jsonb columns.
	depJSON, _ := json.Marshal(item.DependsOn)
	blkJSON, _ := json.Marshal(item.BlockedBy)
	reqJSON, _ := json.Marshal(item.EvidenceRequired)
	provJSON, _ := json.Marshal(item.EvidenceProvided)
	const q = `
	UPDATE roadmap_items SET
		title = $2, description = $3, category = $4, priority = $5, status = $6,
		control_id = $7, regulation_ref = $8, gap_id = $9,
		effort_days = $10, effort_hours = $11, estimated_cost = $12,
		assignee_id = $13, assignee_name = $14, department = $15,
		planned_start = $16, planned_end = $17, actual_start = $18, actual_end = $19,
		depends_on = $20, blocked_by = $21,
		evidence_required = $22, evidence_provided = $23,
		notes = $24, risk_notes = $25,
		sort_order = $26, updated_at = $27
	WHERE id = $1
	`
	_, err := s.pool.Exec(ctx, q,
		item.ID, item.Title, item.Description, string(item.Category), string(item.Priority), string(item.Status),
		item.ControlID, item.RegulationRef, item.GapID,
		item.EffortDays, item.EffortHours, item.EstimatedCost,
		item.AssigneeID, item.AssigneeName, item.Department,
		item.PlannedStart, item.PlannedEnd, item.ActualStart, item.ActualEnd,
		depJSON, blkJSON,
		reqJSON, provJSON,
		item.Notes, item.RiskNotes,
		item.SortOrder, item.UpdatedAt,
	)
	return err
}
// DeleteItem removes the roadmap item with the given ID. Deleting an ID that
// does not exist is not reported as an error.
func (s *Store) DeleteItem(ctx context.Context, id uuid.UUID) error {
	const q = "DELETE FROM roadmap_items WHERE id = $1"
	_, err := s.pool.Exec(ctx, q, id)
	return err
}
// BulkCreateItems inserts all given items inside a single transaction; either
// every item is created or none are. Each element of items is mutated in
// place with a freshly generated ID and creation/update timestamps.
func (s *Store) BulkCreateItems(ctx context.Context, items []RoadmapItem) error {
	tx, err := s.pool.Begin(ctx)
	if err != nil {
		return err
	}
	// Rollback is a no-op once Commit has succeeded.
	defer tx.Rollback(ctx)
	const insertSQL = `
		INSERT INTO roadmap_items (
			id, roadmap_id, title, description, category, priority, status,
			control_id, regulation_ref, gap_id,
			effort_days, effort_hours, estimated_cost,
			assignee_id, assignee_name, department,
			planned_start, planned_end, actual_start, actual_end,
			depends_on, blocked_by,
			evidence_required, evidence_provided,
			notes, risk_notes,
			source_row, source_file, sort_order,
			created_at, updated_at
		) VALUES (
			$1, $2, $3, $4, $5, $6, $7,
			$8, $9, $10,
			$11, $12, $13,
			$14, $15, $16,
			$17, $18, $19, $20,
			$21, $22,
			$23, $24,
			$25, $26,
			$27, $28, $29,
			$30, $31
		)
		`
	for i := range items {
		it := &items[i]
		it.ID = uuid.New()
		it.CreatedAt = time.Now().UTC()
		it.UpdatedAt = it.CreatedAt
		depJSON, _ := json.Marshal(it.DependsOn)
		blkJSON, _ := json.Marshal(it.BlockedBy)
		reqJSON, _ := json.Marshal(it.EvidenceRequired)
		provJSON, _ := json.Marshal(it.EvidenceProvided)
		if _, err := tx.Exec(ctx, insertSQL,
			it.ID, it.RoadmapID, it.Title, it.Description, string(it.Category), string(it.Priority), string(it.Status),
			it.ControlID, it.RegulationRef, it.GapID,
			it.EffortDays, it.EffortHours, it.EstimatedCost,
			it.AssigneeID, it.AssigneeName, it.Department,
			it.PlannedStart, it.PlannedEnd, it.ActualStart, it.ActualEnd,
			depJSON, blkJSON,
			reqJSON, provJSON,
			it.Notes, it.RiskNotes,
			it.SourceRow, it.SourceFile, it.SortOrder,
			it.CreatedAt, it.UpdatedAt,
		); err != nil {
			return err
		}
	}
	return tx.Commit(ctx)
}
// ============================================================================
// Import Job Operations
// ============================================================================
// CreateImportJob persists a new import job, generating its ID and
// timestamps. A job with no explicit status starts out as "pending".
// The job struct is mutated in place with the generated values.
func (s *Store) CreateImportJob(ctx context.Context, job *ImportJob) error {
	job.ID = uuid.New()
	job.CreatedAt = time.Now().UTC()
	job.UpdatedAt = job.CreatedAt
	if job.Status == "" {
		job.Status = "pending"
	}
	// Parsed rows are stored as a JSON blob alongside the job metadata.
	itemsJSON, _ := json.Marshal(job.ParsedItems)
	const q = `
	INSERT INTO roadmap_import_jobs (
		id, tenant_id, roadmap_id,
		filename, format, file_size, content_type,
		status, error_message,
		total_rows, valid_rows, invalid_rows, imported_items,
		parsed_items,
		created_at, updated_at, completed_at, created_by
	) VALUES (
		$1, $2, $3,
		$4, $5, $6, $7,
		$8, $9,
		$10, $11, $12, $13,
		$14,
		$15, $16, $17, $18
	)
	`
	_, err := s.pool.Exec(ctx, q,
		job.ID, job.TenantID, job.RoadmapID,
		job.Filename, string(job.Format), job.FileSize, job.ContentType,
		job.Status, job.ErrorMessage,
		job.TotalRows, job.ValidRows, job.InvalidRows, job.ImportedItems,
		itemsJSON,
		job.CreatedAt, job.UpdatedAt, job.CompletedAt, job.CreatedBy,
	)
	return err
}
// GetImportJob retrieves an import job by ID.
//
// Returns (nil, nil) when no job with the given ID exists.
func (s *Store) GetImportJob(ctx context.Context, id uuid.UUID) (*ImportJob, error) {
	var job ImportJob
	var format string
	var parsedItems []byte
	err := s.pool.QueryRow(ctx, `
		SELECT
			id, tenant_id, roadmap_id,
			filename, format, file_size, content_type,
			status, error_message,
			total_rows, valid_rows, invalid_rows, imported_items,
			parsed_items,
			created_at, updated_at, completed_at, created_by
		FROM roadmap_import_jobs WHERE id = $1
	`, id).Scan(
		&job.ID, &job.TenantID, &job.RoadmapID,
		&job.Filename, &format, &job.FileSize, &job.ContentType,
		&job.Status, &job.ErrorMessage,
		&job.TotalRows, &job.ValidRows, &job.InvalidRows, &job.ImportedItems,
		&parsedItems,
		&job.CreatedAt, &job.UpdatedAt, &job.CompletedAt, &job.CreatedBy,
	)
	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	job.Format = ImportFormat(format)
	// A NULL/empty parsed_items column means "no parsed rows"; any other
	// payload must be valid JSON. The previous version ignored Unmarshal
	// errors, silently returning a job with no parsed items.
	if len(parsedItems) > 0 {
		if err := json.Unmarshal(parsedItems, &job.ParsedItems); err != nil {
			return nil, fmt.Errorf("decoding parsed_items for import job %s: %w", id, err)
		}
	}
	return &job, nil
}
// UpdateImportJob persists the mutable fields of an import job, keyed by its
// ID. It refreshes job.UpdatedAt to the current UTC time before writing.
func (s *Store) UpdateImportJob(ctx context.Context, job *ImportJob) error {
	job.UpdatedAt = time.Now().UTC()
	// Re-serialize the parsed rows for the jsonb column.
	itemsJSON, _ := json.Marshal(job.ParsedItems)
	const q = `
	UPDATE roadmap_import_jobs SET
		roadmap_id = $2,
		status = $3, error_message = $4,
		total_rows = $5, valid_rows = $6, invalid_rows = $7, imported_items = $8,
		parsed_items = $9,
		updated_at = $10, completed_at = $11
	WHERE id = $1
	`
	_, err := s.pool.Exec(ctx, q,
		job.ID, job.RoadmapID,
		job.Status, job.ErrorMessage,
		job.TotalRows, job.ValidRows, job.InvalidRows, job.ImportedItems,
		itemsJSON,
		job.UpdatedAt, job.CompletedAt,
	)
	return err
}
// ============================================================================
// Statistics
// ============================================================================
// GetRoadmapStats returns aggregate statistics for a roadmap: total item
// count, breakdowns by status/priority/category/department, overdue and
// upcoming (next 7 days) counts, total effort, and completion progress in
// whole percent.
//
// Statistics are best-effort: individual query failures leave the affected
// counters at their zero values rather than failing the whole call,
// matching the original behavior. The error return is reserved for future
// hard failures and is currently always nil.
func (s *Store) GetRoadmapStats(ctx context.Context, roadmapID uuid.UUID) (*RoadmapStats, error) {
	stats := &RoadmapStats{
		ByStatus:     make(map[string]int),
		ByPriority:   make(map[string]int),
		ByCategory:   make(map[string]int),
		ByDepartment: make(map[string]int),
	}
	// countOne runs a single-value query into dst, best-effort.
	countOne := func(query string, dst interface{}) {
		// Scan error intentionally ignored: best-effort statistic.
		_ = s.pool.QueryRow(ctx, query, roadmapID).Scan(dst)
	}
	// groupInto runs a "<key>, COUNT(*)" GROUP BY query and fills dest.
	// Unlike the previous version — which stacked four `defer rows.Close()`
	// calls until function return, holding up to four pool connections at
	// once — each result set is closed when this closure returns, so at
	// most one connection is held at a time.
	groupInto := func(query string, dest map[string]int) {
		rows, err := s.pool.Query(ctx, query, roadmapID)
		if err != nil {
			return
		}
		defer rows.Close()
		for rows.Next() {
			var key string
			var count int
			if rows.Scan(&key, &count) == nil {
				dest[key] = count
			}
		}
	}
	// Total count
	countOne("SELECT COUNT(*) FROM roadmap_items WHERE roadmap_id = $1", &stats.TotalItems)
	// Breakdowns
	groupInto("SELECT status, COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 GROUP BY status", stats.ByStatus)
	groupInto("SELECT priority, COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 GROUP BY priority", stats.ByPriority)
	groupInto("SELECT category, COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 GROUP BY category", stats.ByCategory)
	groupInto("SELECT COALESCE(department, 'Unassigned'), COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 GROUP BY department", stats.ByDepartment)
	// Overdue: past planned_end and not completed/deferred.
	countOne("SELECT COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 AND planned_end < NOW() AND status NOT IN ('COMPLETED', 'DEFERRED')", &stats.OverdueItems)
	// Upcoming: planned_end within the next 7 days and not completed/deferred.
	countOne("SELECT COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 AND planned_end BETWEEN NOW() AND NOW() + INTERVAL '7 days' AND status NOT IN ('COMPLETED', 'DEFERRED')", &stats.UpcomingItems)
	// Total effort
	countOne("SELECT COALESCE(SUM(effort_days), 0) FROM roadmap_items WHERE roadmap_id = $1", &stats.TotalEffortDays)
	// Progress: integer percentage of completed items (floor division).
	completedCount := stats.ByStatus[string(ItemStatusCompleted)]
	if stats.TotalItems > 0 {
		stats.Progress = (completedCount * 100) / stats.TotalItems
	}
	return stats, nil
}
// UpdateRoadmapProgress recalculates a roadmap's item totals and completion
// percentage from its items and writes them back to the roadmaps row.
//
// Unlike the previous version — which ignored count-query errors and could
// therefore persist total=0/progress=0 when a query failed — any failure is
// returned without touching the roadmap row.
func (s *Store) UpdateRoadmapProgress(ctx context.Context, roadmapID uuid.UUID) error {
	var total, completed int
	if err := s.pool.QueryRow(ctx,
		"SELECT COUNT(*) FROM roadmap_items WHERE roadmap_id = $1",
		roadmapID).Scan(&total); err != nil {
		return fmt.Errorf("counting roadmap items: %w", err)
	}
	if err := s.pool.QueryRow(ctx,
		"SELECT COUNT(*) FROM roadmap_items WHERE roadmap_id = $1 AND status = 'COMPLETED'",
		roadmapID).Scan(&completed); err != nil {
		return fmt.Errorf("counting completed items: %w", err)
	}
	// Integer percentage (floor division); 0 for an empty roadmap.
	progress := 0
	if total > 0 {
		progress = (completed * 100) / total
	}
	_, err := s.pool.Exec(ctx, `
		UPDATE roadmaps SET
			total_items = $2,
			completed_items = $3,
			progress = $4,
			updated_at = $5
		WHERE id = $1
	`, roadmapID, total, completed, progress, time.Now().UTC())
	return err
}