fix(quality): Ruff/CVE/TS-Fixes, 104 neue Tests, Complexity-Refactoring
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Failing after 30s
CI / test-python-backend-compliance (push) Successful in 30s
CI / test-python-document-crawler (push) Successful in 21s
CI / test-python-dsms-gateway (push) Successful in 17s

- Ruff: 144 auto-fixes (unused imports, == None → is None), F821/F811/F841 manuell
- CVEs: python-multipart>=0.0.22, weasyprint>=68.0, pillow>=12.1.1, npm audit fix (0 vulns)
- TS: 5 tote Drafting-Engine-Dateien entfernt, allowed-facts/sanitizer/StepHeader/context fixes
- Tests: +104 (ISMS 58, Evidence 18, VVT 14, Generation 14) → 1449 passed
- Refactoring: collect_ci_evidence (F→A), row_to_response (E→A), extract_requirements (E→A)
- Dead Code: pca-platform, 7 Go-Handler, dsr_api.py, duplicate Schemas entfernt

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-03-07 19:00:33 +01:00
parent 6509e64dd9
commit 95fcba34cd
124 changed files with 2533 additions and 15709 deletions

View File

@@ -1,451 +0,0 @@
package handlers
import (
"net/http"
"github.com/breakpilot/ai-compliance-sdk/internal/dsb"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
)
// DSBHandlers handles DSB-as-a-Service portal HTTP requests.
// Every user-scoped endpoint identifies the DSB user via the X-User-ID
// request header (see getDSBUserID).
type DSBHandlers struct {
    store *dsb.Store // persistence layer for assignments, hours, tasks, communications
}
// NewDSBHandlers constructs DSB handlers backed by the given store.
func NewDSBHandlers(store *dsb.Store) *DSBHandlers {
    handlers := DSBHandlers{store: store}
    return &handlers
}
// getDSBUserID extracts and parses the X-User-ID header as a UUID.
// On a missing or malformed header it writes a 400 response itself and
// returns (uuid.Nil, false); callers must simply return in that case.
func getDSBUserID(c *gin.Context) (uuid.UUID, bool) {
    raw := c.GetHeader("X-User-ID")
    if raw == "" {
        c.JSON(http.StatusBadRequest, gin.H{"error": "X-User-ID header is required"})
        return uuid.Nil, false
    }
    parsed, err := uuid.Parse(raw)
    if err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid X-User-ID header: must be a valid UUID"})
        return uuid.Nil, false
    }
    return parsed, true
}
// ============================================================================
// Dashboard
// ============================================================================

// GetDashboard returns the aggregated DSB dashboard.
// GET /sdk/v1/dsb/dashboard
func (h *DSBHandlers) GetDashboard(c *gin.Context) {
    userID, ok := getDSBUserID(c)
    if !ok {
        return // 400 already written by getDSBUserID
    }
    dashboard, err := h.store.GetDashboard(c.Request.Context(), userID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, dashboard)
}
// ============================================================================
// Assignments
// ============================================================================

// CreateAssignment creates a new DSB-to-tenant assignment.
// POST /sdk/v1/dsb/assignments
func (h *DSBHandlers) CreateAssignment(c *gin.Context) {
    var body dsb.CreateAssignmentRequest
    if err := c.ShouldBindJSON(&body); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // Map the validated request onto a new assignment record.
    record := dsb.Assignment{
        DSBUserID:          body.DSBUserID,
        TenantID:           body.TenantID,
        Status:             body.Status,
        ContractStart:      body.ContractStart,
        ContractEnd:        body.ContractEnd,
        MonthlyHoursBudget: body.MonthlyHoursBudget,
        Notes:              body.Notes,
    }
    if err := h.store.CreateAssignment(c.Request.Context(), &record); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusCreated, gin.H{"assignment": &record})
}
// ListAssignments returns all assignments for the authenticated DSB user.
// GET /sdk/v1/dsb/assignments
func (h *DSBHandlers) ListAssignments(c *gin.Context) {
    userID, ok := getDSBUserID(c)
    if !ok {
        return // 400 already written by getDSBUserID
    }
    items, err := h.store.ListAssignments(c.Request.Context(), userID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    payload := gin.H{
        "assignments": items,
        "total":       len(items),
    }
    c.JSON(http.StatusOK, payload)
}
// GetAssignment retrieves a single assignment by ID.
// GET /sdk/v1/dsb/assignments/:id
func (h *DSBHandlers) GetAssignment(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    // Any store error is reported as not-found to the client.
    record, err := h.store.GetAssignment(c.Request.Context(), assignmentID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": "assignment not found"})
        return
    }
    c.JSON(http.StatusOK, gin.H{"assignment": record})
}
// UpdateAssignment updates an existing assignment.
// PUT /sdk/v1/dsb/assignments/:id
//
// Performs a partial update: only non-nil fields of the request body
// overwrite the stored values.
func (h *DSBHandlers) UpdateAssignment(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    ctx := c.Request.Context()
    existing, err := h.store.GetAssignment(ctx, assignmentID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": "assignment not found"})
        return
    }
    var patch dsb.UpdateAssignmentRequest
    if err := c.ShouldBindJSON(&patch); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // Apply only the fields present in the request.
    if patch.Status != nil {
        existing.Status = *patch.Status
    }
    if patch.ContractEnd != nil {
        existing.ContractEnd = patch.ContractEnd
    }
    if patch.MonthlyHoursBudget != nil {
        existing.MonthlyHoursBudget = *patch.MonthlyHoursBudget
    }
    if patch.Notes != nil {
        existing.Notes = *patch.Notes
    }
    if err := h.store.UpdateAssignment(ctx, existing); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, gin.H{"assignment": existing})
}
// ============================================================================
// Hours
// ============================================================================

// CreateHourEntry creates a new time tracking entry for an assignment.
// POST /sdk/v1/dsb/assignments/:id/hours
func (h *DSBHandlers) CreateHourEntry(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    var body dsb.CreateHourEntryRequest
    if err := c.ShouldBindJSON(&body); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // Billable defaults to true when the field is omitted from the request.
    isBillable := body.Billable == nil || *body.Billable
    entry := dsb.HourEntry{
        AssignmentID: assignmentID,
        Date:         body.Date,
        Hours:        body.Hours,
        Category:     body.Category,
        Description:  body.Description,
        Billable:     isBillable,
    }
    if err := h.store.CreateHourEntry(c.Request.Context(), &entry); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusCreated, gin.H{"hour_entry": &entry})
}
// ListHours returns time entries for an assignment.
// GET /sdk/v1/dsb/assignments/:id/hours?month=YYYY-MM
func (h *DSBHandlers) ListHours(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    // Optional month filter (YYYY-MM); empty string means no filter.
    monthFilter := c.Query("month")
    entries, err := h.store.ListHours(c.Request.Context(), assignmentID, monthFilter)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    payload := gin.H{
        "hours": entries,
        "total": len(entries),
    }
    c.JSON(http.StatusOK, payload)
}
// GetHoursSummary returns aggregated hour statistics for an assignment.
// GET /sdk/v1/dsb/assignments/:id/hours/summary?month=YYYY-MM
func (h *DSBHandlers) GetHoursSummary(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    // Optional month filter (YYYY-MM); empty string means no filter.
    monthFilter := c.Query("month")
    summary, err := h.store.GetHoursSummary(c.Request.Context(), assignmentID, monthFilter)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, summary)
}
// ============================================================================
// Tasks
// ============================================================================

// CreateTask creates a new task for an assignment.
// POST /sdk/v1/dsb/assignments/:id/tasks
func (h *DSBHandlers) CreateTask(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    var body dsb.CreateTaskRequest
    if err := c.ShouldBindJSON(&body); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // Build the task record from the validated request.
    record := dsb.Task{
        AssignmentID: assignmentID,
        Title:        body.Title,
        Description:  body.Description,
        Category:     body.Category,
        Priority:     body.Priority,
        DueDate:      body.DueDate,
    }
    if err := h.store.CreateTask(c.Request.Context(), &record); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusCreated, gin.H{"task": &record})
}
// ListTasks returns tasks for an assignment.
// GET /sdk/v1/dsb/assignments/:id/tasks?status=open
func (h *DSBHandlers) ListTasks(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    // Optional status filter; empty string means all tasks.
    statusFilter := c.Query("status")
    items, err := h.store.ListTasks(c.Request.Context(), assignmentID, statusFilter)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    payload := gin.H{
        "tasks": items,
        "total": len(items),
    }
    c.JSON(http.StatusOK, payload)
}
// UpdateTask updates an existing task.
// PUT /sdk/v1/dsb/tasks/:taskId
//
// Performs a partial update: the current row is loaded first, then only the
// non-nil fields of the request body replace the stored values.
func (h *DSBHandlers) UpdateTask(c *gin.Context) {
    taskID, err := uuid.Parse(c.Param("taskId"))
    if err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid task ID"})
        return
    }
    var req dsb.UpdateTaskRequest
    if err := c.ShouldBindJSON(&req); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // NOTE(review): the store exposes no GetTask(taskID), so the current row
    // is fetched with an inline SQL query through the store's pool. This
    // leaks persistence details into the handler layer; consider moving the
    // query into the dsb store as a GetTask method.
    task := &dsb.Task{ID: taskID}
    row := h.store.Pool().QueryRow(c.Request.Context(), `
SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
FROM dsb_tasks WHERE id = $1
`, taskID)
    // Scan failure (including no rows) is reported as not-found.
    if err := row.Scan(
        &task.ID, &task.AssignmentID, &task.Title, &task.Description,
        &task.Category, &task.Priority, &task.Status, &task.DueDate,
        &task.CompletedAt, &task.CreatedAt, &task.UpdatedAt,
    ); err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": "task not found"})
        return
    }
    // Apply only the fields present in the request onto the loaded task.
    if req.Title != nil {
        task.Title = *req.Title
    }
    if req.Description != nil {
        task.Description = *req.Description
    }
    if req.Category != nil {
        task.Category = *req.Category
    }
    if req.Priority != nil {
        task.Priority = *req.Priority
    }
    if req.Status != nil {
        task.Status = *req.Status
    }
    if req.DueDate != nil {
        task.DueDate = req.DueDate
    }
    // UpdateTask locates the row by task.ID.
    if err := h.store.UpdateTask(c.Request.Context(), task); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, gin.H{"task": task})
}
// CompleteTask marks a task as completed.
// POST /sdk/v1/dsb/tasks/:taskId/complete
func (h *DSBHandlers) CompleteTask(c *gin.Context) {
    taskID, parseErr := uuid.Parse(c.Param("taskId"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid task ID"})
        return
    }
    if err := h.store.CompleteTask(c.Request.Context(), taskID); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, gin.H{"message": "task completed"})
}
// ============================================================================
// Communications
// ============================================================================

// CreateCommunication creates a new communication log entry.
// POST /sdk/v1/dsb/assignments/:id/communications
func (h *DSBHandlers) CreateCommunication(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    var body dsb.CreateCommunicationRequest
    if err := c.ShouldBindJSON(&body); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // Build the communication record from the validated request.
    record := dsb.Communication{
        AssignmentID: assignmentID,
        Direction:    body.Direction,
        Channel:      body.Channel,
        Subject:      body.Subject,
        Content:      body.Content,
        Participants: body.Participants,
    }
    if err := h.store.CreateCommunication(c.Request.Context(), &record); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusCreated, gin.H{"communication": &record})
}
// ListCommunications returns all communications for an assignment.
// GET /sdk/v1/dsb/assignments/:id/communications
func (h *DSBHandlers) ListCommunications(c *gin.Context) {
    assignmentID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
        return
    }
    items, err := h.store.ListCommunications(c.Request.Context(), assignmentID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    payload := gin.H{
        "communications": items,
        "total":          len(items),
    }
    c.JSON(http.StatusOK, payload)
}

View File

@@ -1,638 +0,0 @@
package handlers
import (
"fmt"
"net/http"
"os"
"time"
"github.com/breakpilot/ai-compliance-sdk/internal/funding"
"github.com/breakpilot/ai-compliance-sdk/internal/llm"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"gopkg.in/yaml.v3"
)
// FundingHandlers handles funding application API endpoints
type FundingHandlers struct {
    store            funding.Store         // persistence for applications, wizard data, history
    providerRegistry *llm.ProviderRegistry // LLM backend used by the wizard assistant
    // wizardSchema is loaded from YAML at construction; may stay nil if
    // loading failed (endpoints guard against that).
    wizardSchema *WizardSchema
    // bundeslandProfiles maps a state key to its profile; may stay nil if
    // loading failed.
    bundeslandProfiles map[string]*BundeslandProfile
}
// WizardSchema represents the loaded wizard schema
// (parsed from policies/funding/foerderantrag_wizard_v1.yaml).
type WizardSchema struct {
    // Metadata describes the schema itself.
    Metadata struct {
        Version     string `yaml:"version"`
        Name        string `yaml:"name"`
        Description string `yaml:"description"`
        TotalSteps  int    `yaml:"total_steps"`
    } `yaml:"metadata"`
    // Steps is the ordered list of wizard steps.
    Steps []WizardStep `yaml:"steps"`
    // FundingAssistant configures the optional LLM-backed helper.
    FundingAssistant struct {
        Enabled      bool           `yaml:"enabled"`
        Model        string         `yaml:"model"`
        SystemPrompt string         `yaml:"system_prompt"`
        StepContexts map[int]string `yaml:"step_contexts"` // extra prompt context keyed by step number
        QuickPrompts []QuickPrompt  `yaml:"quick_prompts"`
    } `yaml:"funding_assistant"`
    // Presets maps preset ID to a pre-filled application template.
    Presets map[string]Preset `yaml:"presets"`
}
// WizardStep represents a step in the wizard.
// It is read from YAML and served to clients as JSON, hence the dual tags.
type WizardStep struct {
    Number           int           `yaml:"number" json:"number"`
    ID               string        `yaml:"id" json:"id"`
    Title            string        `yaml:"title" json:"title"`
    Subtitle         string        `yaml:"subtitle" json:"subtitle"`
    Description      string        `yaml:"description" json:"description"`
    Icon             string        `yaml:"icon" json:"icon"`
    IsRequired       bool          `yaml:"is_required" json:"is_required"`
    Fields           []WizardField `yaml:"fields" json:"fields"`
    AssistantContext string        `yaml:"assistant_context" json:"assistant_context"`
}
// WizardField represents a field in the wizard.
// Optional attributes use omitempty so unset values are dropped from output.
type WizardField struct {
    ID          string        `yaml:"id" json:"id"`
    Type        string        `yaml:"type" json:"type"`
    Label       string        `yaml:"label" json:"label"`
    Placeholder string        `yaml:"placeholder,omitempty" json:"placeholder,omitempty"`
    Required    bool          `yaml:"required,omitempty" json:"required,omitempty"`
    Options     []FieldOption `yaml:"options,omitempty" json:"options,omitempty"` // choices for select-style fields
    HelpText    string        `yaml:"help_text,omitempty" json:"help_text,omitempty"`
    MaxLength   int           `yaml:"max_length,omitempty" json:"max_length,omitempty"`
    Min         *int          `yaml:"min,omitempty" json:"min,omitempty"` // pointer so 0 is distinguishable from unset
    Max         *int          `yaml:"max,omitempty" json:"max,omitempty"` // pointer so 0 is distinguishable from unset
    Default     interface{}   `yaml:"default,omitempty" json:"default,omitempty"`
    Conditional string        `yaml:"conditional,omitempty" json:"conditional,omitempty"`
}
// FieldOption represents an option for select fields.
type FieldOption struct {
    Value       string `yaml:"value" json:"value"`
    Label       string `yaml:"label" json:"label"`
    Description string `yaml:"description,omitempty" json:"description,omitempty"`
}
// QuickPrompt represents a quick prompt for the assistant:
// a button label and the canned prompt text it sends.
type QuickPrompt struct {
    Label  string `yaml:"label" json:"label"`
    Prompt string `yaml:"prompt" json:"prompt"`
}
// Preset represents a BreakPilot preset: a reusable application template
// applied at creation time (see CreateApplication).
type Preset struct {
    ID          string                 `yaml:"id" json:"id"`
    Name        string                 `yaml:"name" json:"name"`
    Description string                 `yaml:"description" json:"description"`
    BudgetItems []funding.BudgetItem   `yaml:"budget_items" json:"budget_items"` // pre-filled budget positions
    AutoFill    map[string]interface{} `yaml:"auto_fill" json:"auto_fill"`       // wizard-data keys filled automatically
}
// BundeslandProfile represents a federal state profile
// (loaded from policies/funding/bundesland_profiles.yaml).
type BundeslandProfile struct {
    Name                string           `yaml:"name" json:"name"`
    Short               string           `yaml:"short" json:"short"`
    FundingPrograms     []string         `yaml:"funding_programs" json:"funding_programs"`
    DefaultFundingRate  float64          `yaml:"default_funding_rate" json:"default_funding_rate"`
    RequiresMEP         bool             `yaml:"requires_mep" json:"requires_mep"`
    ContactAuthority    ContactAuthority `yaml:"contact_authority" json:"contact_authority"`
    SpecialRequirements []string         `yaml:"special_requirements" json:"special_requirements"`
}
// ContactAuthority represents a contact authority for a federal state.
type ContactAuthority struct {
    Name       string `yaml:"name" json:"name"`
    Department string `yaml:"department,omitempty" json:"department,omitempty"`
    Website    string `yaml:"website" json:"website"`
    Email      string `yaml:"email,omitempty" json:"email,omitempty"`
}
// NewFundingHandlers creates new funding handlers.
//
// The wizard schema and bundesland profiles are loaded eagerly. Load failures
// are non-fatal — the corresponding endpoints guard against nil and respond
// with an error — but the warnings are written to stderr (fix: previously
// they went to stdout via fmt.Printf, where diagnostics do not belong).
func NewFundingHandlers(store funding.Store, providerRegistry *llm.ProviderRegistry) *FundingHandlers {
    h := &FundingHandlers{
        store:            store,
        providerRegistry: providerRegistry,
    }
    // Load wizard schema (optional at startup; GetWizardSchema guards nil).
    if err := h.loadWizardSchema(); err != nil {
        fmt.Fprintf(os.Stderr, "Warning: Could not load wizard schema: %v\n", err)
    }
    // Load bundesland profiles (optional; profile endpoints guard nil).
    if err := h.loadBundeslandProfiles(); err != nil {
        fmt.Fprintf(os.Stderr, "Warning: Could not load bundesland profiles: %v\n", err)
    }
    return h
}
// loadWizardSchema reads and parses the wizard definition YAML from disk
// into h.wizardSchema.
func (h *FundingHandlers) loadWizardSchema() error {
    raw, readErr := os.ReadFile("policies/funding/foerderantrag_wizard_v1.yaml")
    if readErr != nil {
        return readErr
    }
    // The field is assigned before unmarshalling, matching the original
    // contract: a parse error still leaves a non-nil (empty) schema.
    h.wizardSchema = new(WizardSchema)
    return yaml.Unmarshal(raw, h.wizardSchema)
}
// loadBundeslandProfiles reads the per-state profile YAML from disk into
// h.bundeslandProfiles.
func (h *FundingHandlers) loadBundeslandProfiles() error {
    raw, readErr := os.ReadFile("policies/funding/bundesland_profiles.yaml")
    if readErr != nil {
        return readErr
    }
    // The file nests all profiles under a single top-level key.
    var doc struct {
        Bundeslaender map[string]*BundeslandProfile `yaml:"bundeslaender"`
    }
    if err := yaml.Unmarshal(raw, &doc); err != nil {
        return err
    }
    h.bundeslandProfiles = doc.Bundeslaender
    return nil
}
// ============================================================================
// Application CRUD
// ============================================================================

// CreateApplication creates a new funding application
// POST /sdk/v1/funding/applications
func (h *FundingHandlers) CreateApplication(c *gin.Context) {
    tenantID := rbac.GetTenantID(c)
    userID := rbac.GetUserID(c)
    if tenantID == uuid.Nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
        return
    }
    var req funding.CreateApplicationRequest
    if err := c.ShouldBindJSON(&req); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // New applications always start as a draft on step 1 of 8, with the
    // school profile seeded from the requested federal state.
    app := &funding.FundingApplication{
        TenantID:       tenantID,
        Title:          req.Title,
        FundingProgram: req.FundingProgram,
        Status:         funding.ApplicationStatusDraft,
        CurrentStep:    1,
        TotalSteps:     8,
        WizardData:     map[string]interface{}{},
        CreatedBy:      userID,
        UpdatedBy:      userID,
        SchoolProfile: &funding.SchoolProfile{
            FederalState: req.FederalState,
        },
    }
    // A known preset pre-fills the budget and selected wizard fields.
    if req.PresetID != "" && h.wizardSchema != nil {
        if preset, found := h.wizardSchema.Presets[req.PresetID]; found {
            app.Budget = &funding.Budget{BudgetItems: preset.BudgetItems}
            app.WizardData["preset_id"] = req.PresetID
            app.WizardData["preset_applied"] = true
            for key, value := range preset.AutoFill {
                app.WizardData[key] = value
            }
        }
    }
    ctx := c.Request.Context()
    if err := h.store.CreateApplication(ctx, app); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    // Best-effort audit entry; failures are deliberately not surfaced.
    h.store.AddHistoryEntry(ctx, &funding.ApplicationHistoryEntry{
        ApplicationID: app.ID,
        Action:        "created",
        PerformedBy:   userID,
        Notes:         "Antrag erstellt",
    })
    c.JSON(http.StatusCreated, app)
}
// GetApplication retrieves a funding application
// GET /sdk/v1/funding/applications/:id
func (h *FundingHandlers) GetApplication(c *gin.Context) {
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    app, err := h.store.GetApplication(c.Request.Context(), appID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, app)
}
// ListApplications returns a list of funding applications
// GET /sdk/v1/funding/applications
func (h *FundingHandlers) ListApplications(c *gin.Context) {
    tenantID := rbac.GetTenantID(c)
    if tenantID == uuid.Nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
        return
    }
    // Defaults: first page, 20 items per page.
    filter := funding.ApplicationFilter{Page: 1, PageSize: 20}
    // Optional query-string filters.
    if raw := c.Query("status"); raw != "" {
        status := funding.ApplicationStatus(raw)
        filter.Status = &status
    }
    if raw := c.Query("program"); raw != "" {
        program := funding.FundingProgram(raw)
        filter.FundingProgram = &program
    }
    result, err := h.store.ListApplications(c.Request.Context(), tenantID, filter)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, result)
}
// UpdateApplication updates a funding application
// PUT /sdk/v1/funding/applications/:id
//
// Performs a partial update: only provided fields are touched, and wizard
// data is merged key-by-key rather than replaced wholesale.
func (h *FundingHandlers) UpdateApplication(c *gin.Context) {
    userID := rbac.GetUserID(c)
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    ctx := c.Request.Context()
    app, err := h.store.GetApplication(ctx, appID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
        return
    }
    var req funding.UpdateApplicationRequest
    if err := c.ShouldBindJSON(&req); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    if req.Title != nil {
        app.Title = *req.Title
    }
    // Ranging over a nil map is a no-op, so no explicit nil check is needed.
    for key, value := range req.WizardData {
        app.WizardData[key] = value
    }
    if req.CurrentStep != nil {
        app.CurrentStep = *req.CurrentStep
    }
    app.UpdatedBy = userID
    if err := h.store.UpdateApplication(ctx, app); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, app)
}
// DeleteApplication deletes a funding application
// DELETE /sdk/v1/funding/applications/:id
func (h *FundingHandlers) DeleteApplication(c *gin.Context) {
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    if err := h.store.DeleteApplication(c.Request.Context(), appID); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    // The store archives rather than hard-deletes, hence the message.
    c.JSON(http.StatusOK, gin.H{"message": "application archived"})
}
// ============================================================================
// Wizard Endpoints
// ============================================================================

// GetWizardSchema returns the wizard schema
// GET /sdk/v1/funding/wizard/schema
func (h *FundingHandlers) GetWizardSchema(c *gin.Context) {
    schema := h.wizardSchema
    if schema == nil {
        // Schema failed to load at startup; see NewFundingHandlers.
        c.JSON(http.StatusInternalServerError, gin.H{"error": "wizard schema not loaded"})
        return
    }
    payload := gin.H{
        "metadata": schema.Metadata,
        "steps":    schema.Steps,
        "presets":  schema.Presets,
        "assistant": gin.H{
            "enabled":       schema.FundingAssistant.Enabled,
            "quick_prompts": schema.FundingAssistant.QuickPrompts,
        },
    }
    c.JSON(http.StatusOK, payload)
}
// SaveWizardStep saves wizard step data
// POST /sdk/v1/funding/applications/:id/wizard
func (h *FundingHandlers) SaveWizardStep(c *gin.Context) {
    userID := rbac.GetUserID(c)
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    var req funding.SaveWizardStepRequest
    if err := c.ShouldBindJSON(&req); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    ctx := c.Request.Context()
    // Persist the step payload.
    if err := h.store.SaveWizardStep(ctx, appID, req.Step, req.Data); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    // Report the post-save completion state back to the client.
    progress, err := h.store.GetWizardProgress(ctx, appID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    // Best-effort audit entry; failures are deliberately not surfaced.
    h.store.AddHistoryEntry(ctx, &funding.ApplicationHistoryEntry{
        ApplicationID: appID,
        Action:        "wizard_step_saved",
        PerformedBy:   userID,
        Notes:         fmt.Sprintf("Schritt %d gespeichert", req.Step),
    })
    c.JSON(http.StatusOK, progress)
}
// AskAssistant handles LLM assistant queries
// POST /sdk/v1/funding/wizard/ask
func (h *FundingHandlers) AskAssistant(c *gin.Context) {
    var req funding.AssistantRequest
    if err := c.ShouldBindJSON(&req); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
        return
    }
    // The assistant needs a loaded schema with the feature switched on.
    if h.wizardSchema == nil || !h.wizardSchema.FundingAssistant.Enabled {
        c.JSON(http.StatusServiceUnavailable, gin.H{"error": "assistant not available"})
        return
    }
    assistant := h.wizardSchema.FundingAssistant
    // Base system prompt, optionally enriched with step-specific context.
    prompt := assistant.SystemPrompt
    if extra, ok := assistant.StepContexts[req.CurrentStep]; ok {
        prompt += "\n\nKontext fuer diesen Schritt:\n" + extra
    }
    // Conversation order: system prompt, prior history, then the question.
    conversation := make([]llm.Message, 0, len(req.History)+2)
    conversation = append(conversation, llm.Message{Role: "system", Content: prompt})
    for _, turn := range req.History {
        conversation = append(conversation, llm.Message{Role: turn.Role, Content: turn.Content})
    }
    conversation = append(conversation, llm.Message{Role: "user", Content: req.Question})
    chatReq := &llm.ChatRequest{
        Messages:    conversation,
        Temperature: 0.3,
        MaxTokens:   1000,
    }
    response, err := h.providerRegistry.Chat(c.Request.Context(), chatReq)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, funding.AssistantResponse{Answer: response.Message.Content})
}
// ============================================================================
// Status Endpoints
// ============================================================================

// SubmitApplication submits an application for review
// POST /sdk/v1/funding/applications/:id/submit
func (h *FundingHandlers) SubmitApplication(c *gin.Context) {
    userID := rbac.GetUserID(c)
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    ctx := c.Request.Context()
    app, err := h.store.GetApplication(ctx, appID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
        return
    }
    // All required wizard steps must be completed before submission.
    // A progress-lookup error is treated the same as missing progress.
    progress, _ := h.store.GetWizardProgress(ctx, appID)
    if progress == nil || len(progress.CompletedSteps) < app.TotalSteps {
        c.JSON(http.StatusBadRequest, gin.H{"error": "not all required steps completed"})
        return
    }
    // Transition to submitted and stamp the submission time.
    submittedAt := time.Now()
    app.Status = funding.ApplicationStatusSubmitted
    app.SubmittedAt = &submittedAt
    app.UpdatedBy = userID
    if err := h.store.UpdateApplication(ctx, app); err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    // Best-effort audit entry; failures are deliberately not surfaced.
    h.store.AddHistoryEntry(ctx, &funding.ApplicationHistoryEntry{
        ApplicationID: appID,
        Action:        "submitted",
        PerformedBy:   userID,
        Notes:         "Antrag eingereicht",
    })
    c.JSON(http.StatusOK, app)
}
// ============================================================================
// Export Endpoints
// ============================================================================

// ExportApplication exports all documents as ZIP
// GET /sdk/v1/funding/applications/:id/export
//
// NOTE: actual export generation is not implemented here yet; the handler
// validates the application and returns a processing placeholder.
func (h *FundingHandlers) ExportApplication(c *gin.Context) {
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    app, err := h.store.GetApplication(c.Request.Context(), appID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, gin.H{
        "message":        "Export generation initiated",
        "application_id": app.ID,
        "status":         "processing",
    })
}
// PreviewApplication generates a PDF preview
// GET /sdk/v1/funding/applications/:id/preview
//
// NOTE: preview generation is not implemented here yet; the handler
// validates the application and returns a placeholder response.
func (h *FundingHandlers) PreviewApplication(c *gin.Context) {
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    app, err := h.store.GetApplication(c.Request.Context(), appID)
    if err != nil {
        c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, gin.H{
        "message":        "Preview generation initiated",
        "application_id": app.ID,
    })
}
// ============================================================================
// Bundesland Profile Endpoints
// ============================================================================

// GetBundeslandProfiles returns all bundesland profiles
// GET /sdk/v1/funding/bundeslaender
func (h *FundingHandlers) GetBundeslandProfiles(c *gin.Context) {
    profiles := h.bundeslandProfiles
    if profiles == nil {
        // Profiles failed to load at startup; see NewFundingHandlers.
        c.JSON(http.StatusInternalServerError, gin.H{"error": "bundesland profiles not loaded"})
        return
    }
    c.JSON(http.StatusOK, profiles)
}
// GetBundeslandProfile returns a specific bundesland profile
// GET /sdk/v1/funding/bundeslaender/:state
func (h *FundingHandlers) GetBundeslandProfile(c *gin.Context) {
    stateKey := c.Param("state")
    if h.bundeslandProfiles == nil {
        // Profiles failed to load at startup; see NewFundingHandlers.
        c.JSON(http.StatusInternalServerError, gin.H{"error": "bundesland profiles not loaded"})
        return
    }
    if profile, found := h.bundeslandProfiles[stateKey]; found {
        c.JSON(http.StatusOK, profile)
        return
    }
    c.JSON(http.StatusNotFound, gin.H{"error": "bundesland not found"})
}
// ============================================================================
// Statistics Endpoint
// ============================================================================

// GetStatistics returns funding statistics
// GET /sdk/v1/funding/statistics
func (h *FundingHandlers) GetStatistics(c *gin.Context) {
    tenantID := rbac.GetTenantID(c)
    if tenantID == uuid.Nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
        return
    }
    stats, err := h.store.GetStatistics(c.Request.Context(), tenantID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, stats)
}
// ============================================================================
// History Endpoint
// ============================================================================

// GetApplicationHistory returns the audit trail
// GET /sdk/v1/funding/applications/:id/history
func (h *FundingHandlers) GetApplicationHistory(c *gin.Context) {
    appID, parseErr := uuid.Parse(c.Param("id"))
    if parseErr != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid application ID"})
        return
    }
    history, err := h.store.GetHistory(c.Request.Context(), appID)
    if err != nil {
        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
        return
    }
    c.JSON(http.StatusOK, history)
}

View File

@@ -1,188 +0,0 @@
package handlers
import (
"net/http"
"github.com/breakpilot/ai-compliance-sdk/internal/gci"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/gin-gonic/gin"
)
// GCIHandlers serves the GCI scoring HTTP endpoints, delegating all
// calculations to the gci engine.
type GCIHandlers struct {
    engine *gci.Engine // scoring engine performing the GCI calculations
}
func NewGCIHandlers(engine *gci.Engine) *GCIHandlers {
return &GCIHandlers{engine: engine}
}
// GetScore returns the GCI score for the current tenant, computed with the
// weighting profile from the "profile" query parameter (default "default").
// GET /sdk/v1/gci/score
func (h *GCIHandlers) GetScore(c *gin.Context) {
	c.JSON(http.StatusOK, h.engine.Calculate(
		rbac.GetTenantID(c).String(),
		c.DefaultQuery("profile", "default"),
	))
}
// GetScoreBreakdown returns the detailed 4-level GCI breakdown for the
// current tenant and the selected weighting profile.
// GET /sdk/v1/gci/score/breakdown
func (h *GCIHandlers) GetScoreBreakdown(c *gin.Context) {
	c.JSON(http.StatusOK, h.engine.CalculateBreakdown(
		rbac.GetTenantID(c).String(),
		c.DefaultQuery("profile", "default"),
	))
}
// GetHistory returns historical GCI snapshots for trend analysis.
// GET /sdk/v1/gci/score/history
func (h *GCIHandlers) GetHistory(c *gin.Context) {
	tid := rbac.GetTenantID(c).String()
	snapshots := h.engine.GetHistory(tid)
	c.JSON(http.StatusOK, gin.H{
		"tenant_id": tid,
		"snapshots": snapshots,
		"total":     len(snapshots),
	})
}
// GetMatrix returns the compliance matrix (roles x regulations) for the tenant.
// GET /sdk/v1/gci/matrix
func (h *GCIHandlers) GetMatrix(c *gin.Context) {
	tid := rbac.GetTenantID(c).String()
	c.JSON(http.StatusOK, gin.H{
		"tenant_id": tid,
		"matrix":    h.engine.GetMatrix(tid),
	})
}
// GetAuditTrail recomputes the GCI score and returns the audit trail of
// that latest calculation alongside the score itself.
// GET /sdk/v1/gci/audit-trail
func (h *GCIHandlers) GetAuditTrail(c *gin.Context) {
	tid := rbac.GetTenantID(c).String()
	calc := h.engine.Calculate(tid, c.DefaultQuery("profile", "default"))
	c.JSON(http.StatusOK, gin.H{
		"tenant_id":   tid,
		"gci_score":   calc.GCIScore,
		"audit_trail": calc.AuditTrail,
	})
}
// GetNIS2Score returns the NIS2-specific compliance score for the tenant.
// GET /sdk/v1/gci/nis2/score
func (h *GCIHandlers) GetNIS2Score(c *gin.Context) {
	c.JSON(http.StatusOK, gci.CalculateNIS2Score(rbac.GetTenantID(c).String()))
}
// ListNIS2Roles returns the catalog of NIS2 responsibility roles.
// GET /sdk/v1/gci/nis2/roles
func (h *GCIHandlers) ListNIS2Roles(c *gin.Context) {
	all := gci.ListNIS2Roles()
	c.JSON(http.StatusOK, gin.H{
		"roles": all,
		"total": len(all),
	})
}
// AssignNIS2Role assigns a NIS2 role to a user. This is a stub: it validates
// that the role exists and echoes the assignment without persisting it.
// POST /sdk/v1/gci/nis2/roles/assign
func (h *GCIHandlers) AssignNIS2Role(c *gin.Context) {
	var payload struct {
		RoleID string `json:"role_id" binding:"required"`
		UserID string `json:"user_id" binding:"required"`
	}
	if err := c.ShouldBindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	role, ok := gci.GetNIS2Role(payload.RoleID)
	if !ok {
		c.JSON(http.StatusNotFound, gin.H{"error": "NIS2 role not found"})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"status":  "assigned",
		"role":    role,
		"user_id": payload.UserID,
	})
}
// GetISOGapAnalysis returns the ISO 27001 gap analysis for the tenant.
// GET /sdk/v1/gci/iso/gap-analysis
func (h *GCIHandlers) GetISOGapAnalysis(c *gin.Context) {
	c.JSON(http.StatusOK, gci.CalculateISOGapAnalysis(rbac.GetTenantID(c).String()))
}
// ListISOMappings returns ISO 27001 control mappings. With a "category"
// query parameter it returns that category's controls only; otherwise it
// returns all non-empty categories (A.5–A.8) grouped by category.
// GET /sdk/v1/gci/iso/mappings
func (h *GCIHandlers) ListISOMappings(c *gin.Context) {
	if cat := c.Query("category"); cat != "" {
		controls := gci.GetISOControlsByCategory(cat)
		c.JSON(http.StatusOK, gin.H{
			"controls": controls,
			"total":    len(controls),
			"category": cat,
		})
		return
	}
	grouped := make(map[string][]gci.ISOControl, 4)
	count := 0
	for _, cat := range []string{"A.5", "A.6", "A.7", "A.8"} {
		if controls := gci.GetISOControlsByCategory(cat); len(controls) > 0 {
			grouped[cat] = controls
			count += len(controls)
		}
	}
	c.JSON(http.StatusOK, gin.H{
		"categories": grouped,
		"total":      count,
	})
}
// GetISOMapping returns a single ISO control looked up by its ID.
// GET /sdk/v1/gci/iso/mappings/:controlId
func (h *GCIHandlers) GetISOMapping(c *gin.Context) {
	ctrl, ok := gci.GetISOControlByID(c.Param("controlId"))
	if !ok {
		c.JSON(http.StatusNotFound, gin.H{"error": "ISO control not found"})
		return
	}
	c.JSON(http.StatusOK, ctrl)
}
// GetWeightProfiles returns the three built-in weighting profiles.
// GET /sdk/v1/gci/profiles
func (h *GCIHandlers) GetWeightProfiles(c *gin.Context) {
	out := []gci.WeightProfile{
		gci.GetProfile("default"),
		gci.GetProfile("nis2_relevant"),
		gci.GetProfile("ki_nutzer"),
	}
	c.JSON(http.StatusOK, gin.H{
		"profiles": out,
	})
}

View File

@@ -1,115 +0,0 @@
package handlers
import (
"net/http"
"github.com/breakpilot/ai-compliance-sdk/internal/industry"
"github.com/gin-gonic/gin"
)
// IndustryHandlers handles industry-specific compliance template requests.
// All data is static (embedded Go structs), so no store/database is needed.
type IndustryHandlers struct{}

// NewIndustryHandlers creates new industry handlers.
func NewIndustryHandlers() *IndustryHandlers {
	return &IndustryHandlers{}
}
// ============================================================================
// Industry Template Endpoints
// ============================================================================
// ListIndustries returns a summary (slug, name, counts) of every available
// industry template.
// GET /sdk/v1/industries
func (h *IndustryHandlers) ListIndustries(c *gin.Context) {
	all := industry.GetAllTemplates()
	items := make([]industry.IndustrySummary, 0, len(all))
	for i := range all {
		items = append(items, industry.IndustrySummary{
			Slug:            all[i].Slug,
			Name:            all[i].Name,
			Description:     all[i].Description,
			Icon:            all[i].Icon,
			RegulationCount: len(all[i].Regulations),
			TemplateCount:   len(all[i].VVTTemplates),
		})
	}
	c.JSON(http.StatusOK, industry.IndustryListResponse{
		Industries: items,
		Total:      len(items),
	})
}
// GetIndustry returns the full industry template for a given slug,
// or 404 when the slug is unknown.
// GET /sdk/v1/industries/:slug
func (h *IndustryHandlers) GetIndustry(c *gin.Context) {
	key := c.Param("slug")
	def := industry.GetTemplateBySlug(key)
	if def == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": key})
		return
	}
	c.JSON(http.StatusOK, def)
}
// GetVVTTemplates returns only the VVT templates of one industry template.
// GET /sdk/v1/industries/:slug/vvt-templates
func (h *IndustryHandlers) GetVVTTemplates(c *gin.Context) {
	key := c.Param("slug")
	def := industry.GetTemplateBySlug(key)
	if def == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": key})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"slug":          def.Slug,
		"industry":      def.Name,
		"vvt_templates": def.VVTTemplates,
		"total":         len(def.VVTTemplates),
	})
}
// GetTOMRecommendations returns only the TOM recommendations of one
// industry template.
// GET /sdk/v1/industries/:slug/tom-recommendations
func (h *IndustryHandlers) GetTOMRecommendations(c *gin.Context) {
	key := c.Param("slug")
	def := industry.GetTemplateBySlug(key)
	if def == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": key})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"slug":                def.Slug,
		"industry":            def.Name,
		"tom_recommendations": def.TOMRecommendations,
		"total":               len(def.TOMRecommendations),
	})
}
// GetRiskScenarios returns only the risk scenarios of one industry template.
// GET /sdk/v1/industries/:slug/risk-scenarios
func (h *IndustryHandlers) GetRiskScenarios(c *gin.Context) {
	key := c.Param("slug")
	def := industry.GetTemplateBySlug(key)
	if def == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": key})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"slug":           def.Slug,
		"industry":       def.Name,
		"risk_scenarios": def.RiskScenarios,
		"total":          len(def.RiskScenarios),
	})
}

View File

@@ -1,268 +0,0 @@
package handlers
import (
"net/http"
"github.com/breakpilot/ai-compliance-sdk/internal/multitenant"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
)
// MultiTenantHandlers handles multi-tenant administration endpoints.
// It combines a multitenant overview store with the RBAC store that owns
// tenant and namespace records.
type MultiTenantHandlers struct {
	store     *multitenant.Store // aggregated overview/detail queries
	rbacStore *rbac.Store        // authoritative tenant/namespace CRUD
}

// NewMultiTenantHandlers creates new multi-tenant handlers.
func NewMultiTenantHandlers(store *multitenant.Store, rbacStore *rbac.Store) *MultiTenantHandlers {
	return &MultiTenantHandlers{
		store:     store,
		rbacStore: rbacStore,
	}
}
// GetOverview returns all tenants with compliance scores and module highlights.
// GET /sdk/v1/multi-tenant/overview
func (h *MultiTenantHandlers) GetOverview(c *gin.Context) {
	result, err := h.store.GetOverview(c.Request.Context())
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, result)
}
// GetTenantDetail returns detailed compliance info for one tenant.
// GET /sdk/v1/multi-tenant/tenants/:id
func (h *MultiTenantHandlers) GetTenantDetail(c *gin.Context) {
	tenantID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
		return
	}
	// Any store failure here is surfaced as "not found" rather than 500.
	detail, err := h.store.GetTenantDetail(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
		return
	}
	c.JSON(http.StatusOK, detail)
}
// CreateTenant creates a new tenant with default setup. The tenant is created
// via the RBAC store, after which a default "main" namespace is provisioned.
// If the namespace step fails, the tenant is kept and the 201 response carries
// a warning instead of failing the request.
// POST /sdk/v1/multi-tenant/tenants
func (h *MultiTenantHandlers) CreateTenant(c *gin.Context) {
	var payload multitenant.CreateTenantRequest
	if err := c.ShouldBindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	newTenant := &rbac.Tenant{
		Name:            payload.Name,
		Slug:            payload.Slug,
		MaxUsers:        payload.MaxUsers,
		LLMQuotaMonthly: payload.LLMQuotaMonthly,
	}
	// The RBAC store assigns ID, timestamps, and remaining defaults.
	if err := h.rbacStore.CreateTenant(c.Request.Context(), newTenant); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	mainNS := &rbac.Namespace{
		TenantID: newTenant.ID,
		Name:     "Main",
		Slug:     "main",
	}
	if err := h.rbacStore.CreateNamespace(c.Request.Context(), mainNS); err != nil {
		// The tenant itself is usable; report the namespace failure as a warning.
		c.JSON(http.StatusCreated, gin.H{
			"tenant":  newTenant,
			"warning": "tenant created but default namespace creation failed: " + err.Error(),
		})
		return
	}
	c.JSON(http.StatusCreated, gin.H{
		"tenant":    newTenant,
		"namespace": mainNS,
	})
}
// UpdateTenant performs a partial update of tenant settings; only fields
// present (non-nil) in the request body are applied.
// PUT /sdk/v1/multi-tenant/tenants/:id
func (h *MultiTenantHandlers) UpdateTenant(c *gin.Context) {
	tenantID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
		return
	}
	var patch multitenant.UpdateTenantRequest
	if err := c.ShouldBindJSON(&patch); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// Load the current record, then overlay only the provided fields.
	current, err := h.rbacStore.GetTenant(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
		return
	}
	if patch.Name != nil {
		current.Name = *patch.Name
	}
	if patch.MaxUsers != nil {
		current.MaxUsers = *patch.MaxUsers
	}
	if patch.LLMQuotaMonthly != nil {
		current.LLMQuotaMonthly = *patch.LLMQuotaMonthly
	}
	if patch.Status != nil {
		current.Status = rbac.TenantStatus(*patch.Status)
	}
	if err := h.rbacStore.UpdateTenant(c.Request.Context(), current); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, current)
}
// ListNamespaces returns all namespaces for a specific tenant.
// GET /sdk/v1/multi-tenant/tenants/:id/namespaces
func (h *MultiTenantHandlers) ListNamespaces(c *gin.Context) {
	tenantID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
		return
	}
	items, err := h.rbacStore.ListNamespaces(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"namespaces": items,
		"total":      len(items),
	})
}
// CreateNamespace creates a new namespace within a tenant. The tenant must
// exist; isolation level and data classification are optional and default
// to whatever the RBAC store applies.
// POST /sdk/v1/multi-tenant/tenants/:id/namespaces
func (h *MultiTenantHandlers) CreateNamespace(c *gin.Context) {
	tenantID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
		return
	}
	// Reject early if the target tenant does not exist.
	if _, err := h.rbacStore.GetTenant(c.Request.Context(), tenantID); err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
		return
	}
	var payload multitenant.CreateNamespaceRequest
	if err := c.ShouldBindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	ns := &rbac.Namespace{
		TenantID: tenantID,
		Name:     payload.Name,
		Slug:     payload.Slug,
	}
	if payload.IsolationLevel != "" {
		ns.IsolationLevel = rbac.IsolationLevel(payload.IsolationLevel)
	}
	if payload.DataClassification != "" {
		ns.DataClassification = rbac.DataClassification(payload.DataClassification)
	}
	if err := h.rbacStore.CreateNamespace(c.Request.Context(), ns); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusCreated, ns)
}
// SwitchTenant returns the tenant info needed for the frontend to switch its
// active tenant context. Only active tenants may be switched to; namespaces
// are included when they can be listed, and omitted (non-fatally) otherwise.
// POST /sdk/v1/multi-tenant/switch
func (h *MultiTenantHandlers) SwitchTenant(c *gin.Context) {
	var payload multitenant.SwitchTenantRequest
	if err := c.ShouldBindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	tenantID, err := uuid.Parse(payload.TenantID)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
		return
	}
	tenant, err := h.rbacStore.GetTenant(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
		return
	}
	if tenant.Status != rbac.TenantStatusActive {
		c.JSON(http.StatusForbidden, gin.H{
			"error":  "tenant not active",
			"status": string(tenant.Status),
		})
		return
	}
	// Build the response payload once instead of duplicating it per branch.
	resp := multitenant.SwitchTenantResponse{
		TenantID:   tenant.ID,
		TenantName: tenant.Name,
		TenantSlug: tenant.Slug,
		Status:     string(tenant.Status),
	}
	namespaces, err := h.rbacStore.ListNamespaces(c.Request.Context(), tenantID)
	if err != nil {
		// Non-fatal: return tenant info without namespaces.
		c.JSON(http.StatusOK, gin.H{"tenant": resp})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"tenant":     resp,
		"namespaces": namespaces,
	})
}

View File

@@ -1,97 +0,0 @@
package handlers
import (
"net/http"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
)
// ReportingHandlers exposes executive-reporting endpoints (score, deadlines,
// risk overview) on top of the reporting store.
type ReportingHandlers struct {
	store *reporting.Store // report generator; assumed non-nil
}

// NewReportingHandlers creates new reporting handlers.
func NewReportingHandlers(store *reporting.Store) *ReportingHandlers {
	return &ReportingHandlers{store: store}
}
// GetExecutiveReport generates and returns the full compliance report for
// the caller's tenant.
// GET /sdk/v1/reporting/executive
func (h *ReportingHandlers) GetExecutiveReport(c *gin.Context) {
	tid := rbac.GetTenantID(c)
	if tid == uuid.Nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
		return
	}
	rep, err := h.store.GenerateReport(c.Request.Context(), tid)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, rep)
}
// GetComplianceScore generates the report but returns only the overall
// compliance score, the risk level, and the generation timestamp.
// GET /sdk/v1/reporting/score
func (h *ReportingHandlers) GetComplianceScore(c *gin.Context) {
	tid := rbac.GetTenantID(c)
	if tid == uuid.Nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
		return
	}
	rep, err := h.store.GenerateReport(c.Request.Context(), tid)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"compliance_score": rep.ComplianceScore,
		"risk_level":       rep.RiskOverview.OverallLevel,
		"generated_at":     rep.GeneratedAt,
	})
}
// GetUpcomingDeadlines generates the report and returns only the deadline
// list aggregated across all modules.
// GET /sdk/v1/reporting/deadlines
func (h *ReportingHandlers) GetUpcomingDeadlines(c *gin.Context) {
	tid := rbac.GetTenantID(c)
	if tid == uuid.Nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
		return
	}
	rep, err := h.store.GenerateReport(c.Request.Context(), tid)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"deadlines": rep.UpcomingDeadlines,
		"total":     len(rep.UpcomingDeadlines),
	})
}
// GetRiskOverview generates the report and returns only its aggregated
// risk assessment section.
// GET /sdk/v1/reporting/risks
func (h *ReportingHandlers) GetRiskOverview(c *gin.Context) {
	tid := rbac.GetTenantID(c)
	if tid == uuid.Nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant ID required"})
		return
	}
	rep, err := h.store.GenerateReport(c.Request.Context(), tid)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, rep.RiskOverview)
}

View File

@@ -1,631 +0,0 @@
package handlers
import (
"crypto/rand"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/breakpilot/ai-compliance-sdk/internal/sso"
"github.com/gin-gonic/gin"
"github.com/golang-jwt/jwt/v5"
"github.com/google/uuid"
)
// SSOHandlers handles SSO-related HTTP requests: config CRUD, SSO user
// listing, and the OIDC authorization-code login flow.
type SSOHandlers struct {
	store     *sso.Store // SSO config and user persistence
	jwtSecret string     // HS256 signing key for tokens issued after SSO login
}

// NewSSOHandlers creates new SSO handlers.
func NewSSOHandlers(store *sso.Store, jwtSecret string) *SSOHandlers {
	return &SSOHandlers{store: store, jwtSecret: jwtSecret}
}
// ============================================================================
// SSO Configuration CRUD
// ============================================================================
// CreateConfig creates a new SSO configuration for the caller's tenant.
// POST /sdk/v1/sso/configs
func (h *SSOHandlers) CreateConfig(c *gin.Context) {
	var payload sso.CreateSSOConfigRequest
	if err := c.ShouldBindJSON(&payload); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	created, err := h.store.CreateConfig(c.Request.Context(), rbac.GetTenantID(c), &payload)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusCreated, gin.H{"config": created})
}
// ListConfigs lists all SSO configurations for the caller's tenant.
// GET /sdk/v1/sso/configs
func (h *SSOHandlers) ListConfigs(c *gin.Context) {
	items, err := h.store.ListConfigs(c.Request.Context(), rbac.GetTenantID(c))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"configs": items,
		"total":   len(items),
	})
}
// GetConfig retrieves a single SSO configuration by ID. The store returns a
// nil config (without error) when no matching row exists, which maps to 404.
// GET /sdk/v1/sso/configs/:id
func (h *SSOHandlers) GetConfig(c *gin.Context) {
	configID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
		return
	}
	cfg, err := h.store.GetConfig(c.Request.Context(), rbac.GetTenantID(c), configID)
	switch {
	case err != nil:
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
	case cfg == nil:
		c.JSON(http.StatusNotFound, gin.H{"error": "sso configuration not found"})
	default:
		c.JSON(http.StatusOK, gin.H{"config": cfg})
	}
}
// UpdateConfig updates an SSO configuration.
// Responds 400 on a bad ID or body, 404 when the config does not exist,
// and 500 for any other store failure.
// PUT /sdk/v1/sso/configs/:id
func (h *SSOHandlers) UpdateConfig(c *gin.Context) {
	tenantID := rbac.GetTenantID(c)
	configID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
		return
	}
	var req sso.UpdateSSOConfigRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	cfg, err := h.store.UpdateConfig(c.Request.Context(), tenantID, configID, &req)
	if err != nil {
		// NOTE(review): matching on the error string is fragile — it breaks
		// silently if the store reworded its message. Prefer exporting a
		// sentinel error from the sso package and comparing with errors.Is.
		if err.Error() == "sso configuration not found" {
			c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{"config": cfg})
}
// DeleteConfig deletes an SSO configuration.
// DELETE /sdk/v1/sso/configs/:id
func (h *SSOHandlers) DeleteConfig(c *gin.Context) {
	configID, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
		return
	}
	if err := h.store.DeleteConfig(c.Request.Context(), rbac.GetTenantID(c), configID); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{"message": "sso configuration deleted"})
}
// ============================================================================
// SSO Users
// ============================================================================
// ListUsers lists all SSO-provisioned users for the caller's tenant.
// GET /sdk/v1/sso/users
func (h *SSOHandlers) ListUsers(c *gin.Context) {
	items, err := h.store.ListUsers(c.Request.Context(), rbac.GetTenantID(c))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"users": items,
		"total": len(items),
	})
}
// ============================================================================
// OIDC Flow
// ============================================================================
// InitiateOIDCLogin initiates the OIDC authorization code flow.
// It looks up the enabled SSO config for the tenant, discovers the IdP's
// authorization endpoint, builds the authorization URL with a random state
// and nonce, stores both in HttpOnly/Secure cookies (10-minute expiry), and
// redirects (302) the user to the IdP.
//
// The state value encodes the tenant ID ("<random>.<tenant_uuid>") so the
// callback handler can correlate the response without server-side session
// storage.
// GET /sdk/v1/sso/oidc/login
func (h *SSOHandlers) InitiateOIDCLogin(c *gin.Context) {
	// Resolve tenant ID from query param
	tenantIDStr := c.Query("tenant_id")
	if tenantIDStr == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant_id query parameter is required"})
		return
	}
	tenantID, err := uuid.Parse(tenantIDStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant_id"})
		return
	}
	// Look up the enabled SSO config; nil config (no error) means none enabled.
	cfg, err := h.store.GetEnabledConfig(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if cfg == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "no enabled SSO configuration found for this tenant"})
		return
	}
	if cfg.ProviderType != sso.ProviderTypeOIDC {
		c.JSON(http.StatusBadRequest, gin.H{"error": "SSO configuration is not OIDC"})
		return
	}
	// Discover the authorization endpoint via the standard well-known URL.
	discoveryURL := strings.TrimSuffix(cfg.OIDCIssuerURL, "/") + "/.well-known/openid-configuration"
	authEndpoint, _, _, err := discoverOIDCEndpoints(discoveryURL)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("OIDC discovery failed: %v", err)})
		return
	}
	// Generate state parameter (32 random bytes + tenant_id for correlation).
	stateBytes := make([]byte, 32)
	if _, err := rand.Read(stateBytes); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate state"})
		return
	}
	state := base64.URLEncoding.EncodeToString(stateBytes) + "." + tenantID.String()
	// Generate nonce (16 random bytes) for ID-token replay protection.
	nonceBytes := make([]byte, 16)
	if _, err := rand.Read(nonceBytes); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate nonce"})
		return
	}
	nonce := base64.URLEncoding.EncodeToString(nonceBytes)
	// Build authorization URL; fall back to the standard minimal scope set
	// when the config specifies none.
	scopes := cfg.OIDCScopes
	if len(scopes) == 0 {
		scopes = []string{"openid", "profile", "email"}
	}
	params := url.Values{
		"client_id":     {cfg.OIDCClientID},
		"redirect_uri":  {cfg.OIDCRedirectURI},
		"response_type": {"code"},
		"scope":         {strings.Join(scopes, " ")},
		"state":         {state},
		"nonce":         {nonce},
	}
	authURL := authEndpoint + "?" + params.Encode()
	// Set state cookie for CSRF protection (Secure, HttpOnly, 10 min expiry).
	// NOTE(review): the nonce cookie is set here but the callback never
	// compares it against the ID token's nonce claim — verify intent.
	c.SetCookie("sso_state", state, 600, "/", "", true, true)
	c.SetCookie("sso_nonce", nonce, 600, "/", "", true, true)
	c.Redirect(http.StatusFound, authURL)
}
// HandleOIDCCallback handles the OIDC authorization code callback from the IdP.
//
// Steps: (1) surface IdP-reported errors, (2) validate the state parameter
// against the CSRF cookie set by InitiateOIDCLogin, (3) recover the tenant ID
// embedded in the state, (4) exchange the authorization code for tokens,
// (5) extract user claims from the ID token or UserInfo endpoint, (6) JIT-
// provision the user, (7) resolve roles from the config's group mapping, and
// (8) issue an application JWT returned as JSON.
//
// NOTE(review): extractUserClaims decodes the ID token payload without
// signature verification (see its comment); production deployments should
// verify against the IdP's JWKS before trusting these claims.
// GET /sdk/v1/sso/oidc/callback
func (h *SSOHandlers) HandleOIDCCallback(c *gin.Context) {
	// Check for errors from the IdP
	if errParam := c.Query("error"); errParam != "" {
		errDesc := c.Query("error_description")
		c.JSON(http.StatusBadRequest, gin.H{
			"error":       errParam,
			"description": errDesc,
		})
		return
	}
	code := c.Query("code")
	stateParam := c.Query("state")
	if code == "" || stateParam == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "missing code or state parameter"})
		return
	}
	// Validate state cookie (must match the value set during login initiation).
	stateCookie, err := c.Cookie("sso_state")
	if err != nil || stateCookie != stateParam {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid state parameter (CSRF check failed)"})
		return
	}
	// Extract tenant ID from state (format: "<random>.<tenant_uuid>").
	parts := strings.SplitN(stateParam, ".", 2)
	if len(parts) != 2 {
		c.JSON(http.StatusBadRequest, gin.H{"error": "malformed state parameter"})
		return
	}
	tenantID, err := uuid.Parse(parts[1])
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant_id in state"})
		return
	}
	// Look up the enabled SSO config
	cfg, err := h.store.GetEnabledConfig(c.Request.Context(), tenantID)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if cfg == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "no enabled SSO configuration found"})
		return
	}
	// Discover OIDC endpoints (token + userinfo; authorization unused here).
	discoveryURL := strings.TrimSuffix(cfg.OIDCIssuerURL, "/") + "/.well-known/openid-configuration"
	_, tokenEndpoint, userInfoEndpoint, err := discoverOIDCEndpoints(discoveryURL)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("OIDC discovery failed: %v", err)})
		return
	}
	// Exchange authorization code for tokens
	tokenResp, err := exchangeCodeForTokens(tokenEndpoint, code, cfg.OIDCClientID, cfg.OIDCClientSecret, cfg.OIDCRedirectURI)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("token exchange failed: %v", err)})
		return
	}
	// Extract user claims from ID token or UserInfo endpoint
	claims, err := extractUserClaims(tokenResp, userInfoEndpoint)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to extract user claims: %v", err)})
		return
	}
	sub := getStringClaim(claims, "sub")
	email := getStringClaim(claims, "email")
	name := getStringClaim(claims, "name")
	groups := getStringSliceClaim(claims, "groups")
	if sub == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "ID token missing 'sub' claim"})
		return
	}
	// Fall back to sub/email so the user record always has usable identifiers.
	if email == "" {
		email = sub
	}
	if name == "" {
		name = email
	}
	// JIT provision the user (create or update by subject identifier).
	user, err := h.store.UpsertUser(c.Request.Context(), tenantID, cfg.ID, sub, email, name, groups)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("user provisioning failed: %v", err)})
		return
	}
	// Determine roles from the config's group-to-role mapping.
	roles := resolveRoles(cfg, groups)
	// Generate the application JWT carrying the SSO identity.
	ssoClaims := sso.SSOClaims{
		UserID:      user.ID,
		TenantID:    tenantID,
		Email:       user.Email,
		DisplayName: user.DisplayName,
		Roles:       roles,
		SSOConfigID: cfg.ID,
	}
	jwtToken, err := h.generateJWT(ssoClaims)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("JWT generation failed: %v", err)})
		return
	}
	// Clear state cookies (max-age -1 expires them immediately).
	c.SetCookie("sso_state", "", -1, "/", "", true, true)
	c.SetCookie("sso_nonce", "", -1, "/", "", true, true)
	// Return JWT as JSON (the frontend can also handle redirect)
	c.JSON(http.StatusOK, gin.H{
		"token": jwtToken,
		"user":  user,
		"roles": roles,
	})
}
// ============================================================================
// JWT Generation
// ============================================================================
// generateJWT creates a signed HS256 JWT containing the SSO claims,
// valid for 24 hours from issuance.
func (h *SSOHandlers) generateJWT(claims sso.SSOClaims) (string, error) {
	issuedAt := time.Now().UTC()
	payload := jwt.MapClaims{
		"user_id":       claims.UserID.String(),
		"tenant_id":     claims.TenantID.String(),
		"email":         claims.Email,
		"display_name":  claims.DisplayName,
		"roles":         claims.Roles,
		"sso_config_id": claims.SSOConfigID.String(),
		"iss":           "ai-compliance-sdk",
		"iat":           issuedAt.Unix(),
		"exp":           issuedAt.Add(24 * time.Hour).Unix(),
	}
	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, payload).SignedString([]byte(h.jwtSecret))
	if err != nil {
		return "", fmt.Errorf("failed to sign JWT: %w", err)
	}
	return signed, nil
}
// ============================================================================
// OIDC Discovery & Token Exchange (manual HTTP, no external OIDC library)
// ============================================================================
// oidcDiscoveryResponse holds the relevant fields from the OIDC discovery document.
type oidcDiscoveryResponse struct {
	AuthorizationEndpoint string `json:"authorization_endpoint"`
	TokenEndpoint         string `json:"token_endpoint"`
	UserinfoEndpoint      string `json:"userinfo_endpoint"`
	JwksURI               string `json:"jwks_uri"`
	Issuer                string `json:"issuer"`
}

// discoverOIDCEndpoints fetches the OIDC discovery document and returns the
// authorization, token, and userinfo endpoints. Authorization and token
// endpoints are mandatory; the userinfo endpoint may be empty.
func discoverOIDCEndpoints(discoveryURL string) (authEndpoint, tokenEndpoint, userInfoEndpoint string, err error) {
	httpClient := &http.Client{Timeout: 10 * time.Second}
	resp, err := httpClient.Get(discoveryURL)
	if err != nil {
		return "", "", "", fmt.Errorf("failed to fetch discovery document: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Include the response body to aid debugging of misconfigured issuers.
		body, _ := io.ReadAll(resp.Body)
		return "", "", "", fmt.Errorf("discovery endpoint returned %d: %s", resp.StatusCode, string(body))
	}
	var doc oidcDiscoveryResponse
	if err := json.NewDecoder(resp.Body).Decode(&doc); err != nil {
		return "", "", "", fmt.Errorf("failed to decode discovery document: %w", err)
	}
	switch {
	case doc.AuthorizationEndpoint == "":
		return "", "", "", fmt.Errorf("discovery document missing authorization_endpoint")
	case doc.TokenEndpoint == "":
		return "", "", "", fmt.Errorf("discovery document missing token_endpoint")
	}
	return doc.AuthorizationEndpoint, doc.TokenEndpoint, doc.UserinfoEndpoint, nil
}
// oidcTokenResponse holds the response from the OIDC token endpoint.
type oidcTokenResponse struct {
	AccessToken  string `json:"access_token"`
	IDToken      string `json:"id_token"`
	TokenType    string `json:"token_type"`
	ExpiresIn    int    `json:"expires_in"`
	RefreshToken string `json:"refresh_token,omitempty"`
}

// exchangeCodeForTokens exchanges an authorization code for tokens at the
// token endpoint, authenticating with client_secret_basic when a client
// secret is provided.
func exchangeCodeForTokens(tokenEndpoint, code, clientID, clientSecret, redirectURI string) (*oidcTokenResponse, error) {
	form := url.Values{
		"grant_type":   {"authorization_code"},
		"code":         {code},
		"client_id":    {clientID},
		"redirect_uri": {redirectURI},
	}
	req, err := http.NewRequest("POST", tokenEndpoint, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, fmt.Errorf("failed to create token request: %w", err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	if clientSecret != "" {
		// client_secret_basic: credentials travel in the Authorization header.
		req.SetBasicAuth(clientID, clientSecret)
	}
	httpClient := &http.Client{Timeout: 10 * time.Second}
	resp, err := httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("token request failed: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("token endpoint returned %d: %s", resp.StatusCode, string(body))
	}
	out := &oidcTokenResponse{}
	if err := json.NewDecoder(resp.Body).Decode(out); err != nil {
		return nil, fmt.Errorf("failed to decode token response: %w", err)
	}
	return out, nil
}
// extractUserClaims extracts user claims from the ID token payload.
// If the ID token is unavailable or incomplete, it falls back to the UserInfo endpoint.
func extractUserClaims(tokenResp *oidcTokenResponse, userInfoEndpoint string) (map[string]interface{}, error) {
claims := make(map[string]interface{})
// Try to decode ID token payload (without signature verification for claims extraction;
// in production, you should verify the signature using the JWKS endpoint)
if tokenResp.IDToken != "" {
parts := strings.Split(tokenResp.IDToken, ".")
if len(parts) == 3 {
payload, err := base64.RawURLEncoding.DecodeString(parts[1])
if err == nil {
if err := json.Unmarshal(payload, &claims); err == nil && claims["sub"] != nil {
return claims, nil
}
}
}
}
// Fallback to UserInfo endpoint
if userInfoEndpoint != "" && tokenResp.AccessToken != "" {
userClaims, err := fetchUserInfo(userInfoEndpoint, tokenResp.AccessToken)
if err == nil && userClaims["sub"] != nil {
return userClaims, nil
}
}
if claims["sub"] != nil {
return claims, nil
}
return nil, fmt.Errorf("could not extract user claims from ID token or UserInfo endpoint")
}
// fetchUserInfo calls the OIDC UserInfo endpoint with the access token.
func fetchUserInfo(userInfoEndpoint, accessToken string) (map[string]interface{}, error) {
client := &http.Client{Timeout: 10 * time.Second}
req, err := http.NewRequest("GET", userInfoEndpoint, nil)
if err != nil {
return nil, err
}
req.Header.Set("Authorization", "Bearer "+accessToken)
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("userinfo endpoint returned %d", resp.StatusCode)
}
var claims map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&claims); err != nil {
return nil, err
}
return claims, nil
}
// ============================================================================
// Claim Extraction Helpers
// ============================================================================

// getStringClaim returns the claim stored under key as a string. Absent
// claims and claims of a non-string type yield the empty string.
func getStringClaim(claims map[string]interface{}, key string) string {
	// A failed type assertion with the two-value form leaves s at "".
	s, _ := claims[key].(string)
	return s
}
// getStringSliceClaim returns the claim stored under key as a []string.
// A []interface{} value is filtered down to its string elements; a []string
// is returned as-is; anything else (including a missing key) yields nil.
func getStringSliceClaim(claims map[string]interface{}, key string) []string {
	switch val := claims[key].(type) {
	case []string:
		return val
	case []interface{}:
		// Keep only the elements that actually are strings.
		out := make([]string, 0, len(val))
		for _, elem := range val {
			if s, ok := elem.(string); ok {
				out = append(out, s)
			}
		}
		return out
	}
	return nil
}
// resolveRoles maps SSO groups to internal roles using the config's role
// mapping. If the config has no mapping, or none of the supplied groups
// match, the configured default role (or the "compliance_user" fallback)
// is returned. The result is deduplicated; its order is unspecified.
func resolveRoles(cfg *sso.SSOConfig, groups []string) []string {
	// Single source of truth for the fallback role, previously duplicated.
	defaultRoles := func() []string {
		if cfg.DefaultRoleID != nil {
			return []string{cfg.DefaultRoleID.String()}
		}
		return []string{"compliance_user"}
	}
	// len() on a nil map is 0, so a separate nil check is redundant (S1009).
	if len(cfg.RoleMapping) == 0 {
		return defaultRoles()
	}
	// Collect mapped roles into a set to deduplicate overlapping mappings.
	roleSet := make(map[string]bool)
	for _, group := range groups {
		if role, ok := cfg.RoleMapping[group]; ok {
			roleSet[role] = true
		}
	}
	if len(roleSet) == 0 {
		return defaultRoles()
	}
	roles := make([]string, 0, len(roleSet))
	for role := range roleSet {
		roles = append(roles, role)
	}
	return roles
}

View File

@@ -1,164 +0,0 @@
package dsb
import (
"time"
"github.com/google/uuid"
)
// ============================================================================
// Core Models
// ============================================================================

// Assignment represents a DSB-to-tenant assignment: the engagement under
// which a data protection officer (DSB) user serves a single tenant.
type Assignment struct {
	ID                 uuid.UUID  `json:"id"`
	DSBUserID          uuid.UUID  `json:"dsb_user_id"`
	TenantID           uuid.UUID  `json:"tenant_id"`
	TenantName         string     `json:"tenant_name"` // populated via JOIN
	TenantSlug         string     `json:"tenant_slug"` // populated via JOIN
	Status             string     `json:"status"`      // active, paused, terminated
	ContractStart      time.Time  `json:"contract_start"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"` // nil while the contract is open-ended
	MonthlyHoursBudget float64    `json:"monthly_hours_budget"`   // contracted hours per month
	Notes              string     `json:"notes"`
	CreatedAt          time.Time  `json:"created_at"`
	UpdatedAt          time.Time  `json:"updated_at"`
}
// HourEntry represents a single DSB time tracking entry booked against an
// assignment.
type HourEntry struct {
	ID           uuid.UUID `json:"id"`
	AssignmentID uuid.UUID `json:"assignment_id"`
	Date         time.Time `json:"date"`
	Hours        float64   `json:"hours"`
	Category     string    `json:"category"` // dsfa_review, consultation, audit, training, incident_response, documentation, meeting, other
	Description  string    `json:"description"`
	Billable     bool      `json:"billable"` // counts toward billable-hours totals in HoursSummary
	CreatedAt    time.Time `json:"created_at"`
}
// Task represents a DSB task/work item tied to an assignment.
type Task struct {
	ID           uuid.UUID  `json:"id"`
	AssignmentID uuid.UUID  `json:"assignment_id"`
	Title        string     `json:"title"`
	Description  string     `json:"description"`
	Category     string     `json:"category"` // dsfa_review, dsr_response, incident_review, audit_preparation, policy_review, training, consultation, other
	Priority     string     `json:"priority"` // low, medium, high, urgent
	Status       string     `json:"status"`   // open, in_progress, waiting, completed, cancelled
	DueDate      *time.Time `json:"due_date,omitempty"`
	CompletedAt  *time.Time `json:"completed_at,omitempty"` // set when status transitions to completed
	CreatedAt    time.Time  `json:"created_at"`
	UpdatedAt    time.Time  `json:"updated_at"`
}
// Communication represents a DSB communication log entry: a record of an
// exchange between the DSB and the tenant.
type Communication struct {
	ID           uuid.UUID `json:"id"`
	AssignmentID uuid.UUID `json:"assignment_id"`
	Direction    string    `json:"direction"` // inbound, outbound
	Channel      string    `json:"channel"`   // email, phone, meeting, portal, letter
	Subject      string    `json:"subject"`
	Content      string    `json:"content"`
	Participants string    `json:"participants"` // free-form list of people involved
	CreatedAt    time.Time `json:"created_at"`
}
// ============================================================================
// Dashboard Models
// ============================================================================

// DSBDashboard provides the aggregated overview for a DSB user across all
// of their assignments.
type DSBDashboard struct {
	Assignments         []AssignmentOverview `json:"assignments"`
	TotalAssignments    int                  `json:"total_assignments"`
	ActiveAssignments   int                  `json:"active_assignments"` // assignments with status "active"
	TotalHoursThisMonth float64              `json:"total_hours_this_month"`
	OpenTasks           int                  `json:"open_tasks"`   // open + in_progress across all assignments
	UrgentTasks         int                  `json:"urgent_tasks"` // subset of OpenTasks with priority "urgent"
	GeneratedAt         time.Time            `json:"generated_at"`
}
// AssignmentOverview enriches an Assignment with aggregated metrics for the
// dashboard (compliance score, hour usage, and task counters).
type AssignmentOverview struct {
	Assignment
	ComplianceScore int        `json:"compliance_score"` // from the reporting store; 0 when unavailable
	HoursThisMonth  float64    `json:"hours_this_month"`
	HoursBudget     float64    `json:"hours_budget"` // copy of MonthlyHoursBudget for the dashboard view
	OpenTaskCount   int        `json:"open_task_count"`
	UrgentTaskCount int        `json:"urgent_task_count"`
	NextDeadline    *time.Time `json:"next_deadline,omitempty"` // earliest due date among open/in_progress tasks
}
// ============================================================================
// Request Models
// ============================================================================

// CreateAssignmentRequest is the request body for creating an assignment.
type CreateAssignmentRequest struct {
	DSBUserID          uuid.UUID  `json:"dsb_user_id" binding:"required"`
	TenantID           uuid.UUID  `json:"tenant_id" binding:"required"`
	Status             string     `json:"status"` // defaults to "active" when empty
	ContractStart      time.Time  `json:"contract_start" binding:"required"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"`
	MonthlyHoursBudget float64    `json:"monthly_hours_budget"`
	Notes              string     `json:"notes"`
}

// UpdateAssignmentRequest is the request body for updating an assignment.
// All fields are optional pointers; only non-nil fields are applied.
type UpdateAssignmentRequest struct {
	Status             *string    `json:"status,omitempty"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"`
	MonthlyHoursBudget *float64   `json:"monthly_hours_budget,omitempty"`
	Notes              *string    `json:"notes,omitempty"`
}

// CreateHourEntryRequest is the request body for creating a time entry.
type CreateHourEntryRequest struct {
	Date        time.Time `json:"date" binding:"required"`
	Hours       float64   `json:"hours" binding:"required"`
	Category    string    `json:"category" binding:"required"`
	Description string    `json:"description" binding:"required"`
	Billable    *bool     `json:"billable,omitempty"` // pointer so "omitted" is distinguishable from false
}

// CreateTaskRequest is the request body for creating a task.
type CreateTaskRequest struct {
	Title       string     `json:"title" binding:"required"`
	Description string     `json:"description"`
	Category    string     `json:"category" binding:"required"`
	Priority    string     `json:"priority"` // defaults to "medium" when empty
	DueDate     *time.Time `json:"due_date,omitempty"`
}

// UpdateTaskRequest is the request body for updating a task.
// All fields are optional pointers; only non-nil fields are applied.
type UpdateTaskRequest struct {
	Title       *string    `json:"title,omitempty"`
	Description *string    `json:"description,omitempty"`
	Category    *string    `json:"category,omitempty"`
	Priority    *string    `json:"priority,omitempty"`
	Status      *string    `json:"status,omitempty"`
	DueDate     *time.Time `json:"due_date,omitempty"`
}

// CreateCommunicationRequest is the request body for creating a communication entry.
type CreateCommunicationRequest struct {
	Direction    string `json:"direction" binding:"required"` // inbound, outbound
	Channel      string `json:"channel" binding:"required"`   // email, phone, meeting, portal, letter
	Subject      string `json:"subject" binding:"required"`
	Content      string `json:"content"`
	Participants string `json:"participants"`
}
// ============================================================================
// Summary Models
// ============================================================================

// HoursSummary provides aggregated hour statistics for an assignment.
type HoursSummary struct {
	TotalHours    float64            `json:"total_hours"`
	BillableHours float64            `json:"billable_hours"` // subset of TotalHours flagged billable
	ByCategory    map[string]float64 `json:"by_category"`    // summed hours keyed by entry category
	Period        string             `json:"period"`         // YYYY-MM or "all"
}

View File

@@ -1,510 +0,0 @@
package dsb
import (
"context"
"fmt"
"time"
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
"github.com/google/uuid"
"github.com/jackc/pgx/v5/pgxpool"
)
// Store provides database operations for the DSB portal.
type Store struct {
	pool           *pgxpool.Pool    // primary Postgres connection pool
	reportingStore *reporting.Store // optional; enriches dashboards with compliance scores
}

// NewStore creates a new DSB store.
//
// reportingStore may be nil; dashboard generation then skips the
// compliance-score enrichment (see GetDashboard).
func NewStore(pool *pgxpool.Pool, reportingStore *reporting.Store) *Store {
	return &Store{
		pool:           pool,
		reportingStore: reportingStore,
	}
}

// Pool returns the underlying connection pool for direct queries when needed.
func (s *Store) Pool() *pgxpool.Pool {
	return s.pool
}
// ============================================================================
// Dashboard
// ============================================================================
// GetDashboard generates the aggregated DSB dashboard for a given DSB user.
func (s *Store) GetDashboard(ctx context.Context, dsbUserID uuid.UUID) (*DSBDashboard, error) {
assignments, err := s.ListAssignments(ctx, dsbUserID)
if err != nil {
return nil, fmt.Errorf("list assignments: %w", err)
}
now := time.Now().UTC()
currentMonth := now.Format("2006-01")
dashboard := &DSBDashboard{
Assignments: make([]AssignmentOverview, 0, len(assignments)),
GeneratedAt: now,
}
for _, a := range assignments {
overview := AssignmentOverview{
Assignment: a,
HoursBudget: a.MonthlyHoursBudget,
}
// Enrich with compliance score (error-tolerant)
if s.reportingStore != nil {
report, err := s.reportingStore.GenerateReport(ctx, a.TenantID)
if err == nil && report != nil {
overview.ComplianceScore = report.ComplianceScore
}
}
// Hours this month
summary, err := s.GetHoursSummary(ctx, a.ID, currentMonth)
if err == nil && summary != nil {
overview.HoursThisMonth = summary.TotalHours
}
// Open and urgent tasks
openTasks, err := s.ListTasks(ctx, a.ID, "open")
if err == nil {
overview.OpenTaskCount = len(openTasks)
for _, t := range openTasks {
if t.Priority == "urgent" {
overview.UrgentTaskCount++
}
if t.DueDate != nil && (overview.NextDeadline == nil || t.DueDate.Before(*overview.NextDeadline)) {
overview.NextDeadline = t.DueDate
}
}
}
// Also count in_progress tasks
inProgressTasks, err := s.ListTasks(ctx, a.ID, "in_progress")
if err == nil {
overview.OpenTaskCount += len(inProgressTasks)
for _, t := range inProgressTasks {
if t.Priority == "urgent" {
overview.UrgentTaskCount++
}
if t.DueDate != nil && (overview.NextDeadline == nil || t.DueDate.Before(*overview.NextDeadline)) {
overview.NextDeadline = t.DueDate
}
}
}
dashboard.Assignments = append(dashboard.Assignments, overview)
dashboard.TotalAssignments++
if a.Status == "active" {
dashboard.ActiveAssignments++
}
dashboard.TotalHoursThisMonth += overview.HoursThisMonth
dashboard.OpenTasks += overview.OpenTaskCount
dashboard.UrgentTasks += overview.UrgentTaskCount
}
return dashboard, nil
}
// ============================================================================
// Assignments
// ============================================================================
// CreateAssignment inserts a new DSB assignment. As a side effect it
// assigns a fresh ID and creation/update timestamps to a, and defaults the
// status to "active" when none is provided.
func (s *Store) CreateAssignment(ctx context.Context, a *Assignment) error {
	now := time.Now().UTC()
	a.ID = uuid.New()
	a.CreatedAt, a.UpdatedAt = now, now
	if a.Status == "" {
		a.Status = "active"
	}
	if _, err := s.pool.Exec(ctx, `
		INSERT INTO dsb_assignments (id, dsb_user_id, tenant_id, status, contract_start, contract_end, monthly_hours_budget, notes, created_at, updated_at)
		VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
	`, a.ID, a.DSBUserID, a.TenantID, a.Status, a.ContractStart, a.ContractEnd, a.MonthlyHoursBudget, a.Notes, a.CreatedAt, a.UpdatedAt); err != nil {
		return fmt.Errorf("insert assignment: %w", err)
	}
	return nil
}
// ListAssignments returns all assignments for a given DSB user, joined with
// tenant name and slug, newest first. The result is never nil so it
// serializes to [] rather than null.
func (s *Store) ListAssignments(ctx context.Context, dsbUserID uuid.UUID) ([]Assignment, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT a.id, a.dsb_user_id, a.tenant_id, ct.name, ct.slug,
		       a.status, a.contract_start, a.contract_end,
		       a.monthly_hours_budget, a.notes, a.created_at, a.updated_at
		FROM dsb_assignments a
		JOIN compliance_tenants ct ON ct.id = a.tenant_id
		WHERE a.dsb_user_id = $1
		ORDER BY a.created_at DESC
	`, dsbUserID)
	if err != nil {
		return nil, fmt.Errorf("query assignments: %w", err)
	}
	defer rows.Close()
	var assignments []Assignment
	for rows.Next() {
		var a Assignment
		if err := rows.Scan(
			&a.ID, &a.DSBUserID, &a.TenantID, &a.TenantName, &a.TenantSlug,
			&a.Status, &a.ContractStart, &a.ContractEnd,
			&a.MonthlyHoursBudget, &a.Notes, &a.CreatedAt, &a.UpdatedAt,
		); err != nil {
			return nil, fmt.Errorf("scan assignment: %w", err)
		}
		assignments = append(assignments, a)
	}
	// FIX: errors that abort iteration (e.g. connection loss mid-result-set)
	// were previously swallowed, silently returning a truncated list.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate assignments: %w", err)
	}
	if assignments == nil {
		assignments = []Assignment{}
	}
	return assignments, nil
}
// GetAssignment retrieves a single assignment by ID, with the tenant name
// and slug resolved via JOIN.
func (s *Store) GetAssignment(ctx context.Context, id uuid.UUID) (*Assignment, error) {
	assignment := Assignment{}
	row := s.pool.QueryRow(ctx, `
		SELECT a.id, a.dsb_user_id, a.tenant_id, ct.name, ct.slug,
		       a.status, a.contract_start, a.contract_end,
		       a.monthly_hours_budget, a.notes, a.created_at, a.updated_at
		FROM dsb_assignments a
		JOIN compliance_tenants ct ON ct.id = a.tenant_id
		WHERE a.id = $1
	`, id)
	if err := row.Scan(
		&assignment.ID, &assignment.DSBUserID, &assignment.TenantID, &assignment.TenantName, &assignment.TenantSlug,
		&assignment.Status, &assignment.ContractStart, &assignment.ContractEnd,
		&assignment.MonthlyHoursBudget, &assignment.Notes, &assignment.CreatedAt, &assignment.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("get assignment: %w", err)
	}
	return &assignment, nil
}
// UpdateAssignment persists the mutable fields of an existing assignment
// (status, contract end, hours budget, notes) and refreshes updated_at.
func (s *Store) UpdateAssignment(ctx context.Context, a *Assignment) error {
	if _, err := s.pool.Exec(ctx, `
		UPDATE dsb_assignments
		SET status = $2, contract_end = $3, monthly_hours_budget = $4, notes = $5, updated_at = NOW()
		WHERE id = $1
	`, a.ID, a.Status, a.ContractEnd, a.MonthlyHoursBudget, a.Notes); err != nil {
		return fmt.Errorf("update assignment: %w", err)
	}
	return nil
}
// ============================================================================
// Hours
// ============================================================================
// CreateHourEntry inserts a new time tracking entry, assigning a fresh ID
// and creation timestamp to h as a side effect.
func (s *Store) CreateHourEntry(ctx context.Context, h *HourEntry) error {
	h.ID = uuid.New()
	h.CreatedAt = time.Now().UTC()
	if _, err := s.pool.Exec(ctx, `
		INSERT INTO dsb_hours (id, assignment_id, date, hours, category, description, billable, created_at)
		VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
	`, h.ID, h.AssignmentID, h.Date, h.Hours, h.Category, h.Description, h.Billable, h.CreatedAt); err != nil {
		return fmt.Errorf("insert hour entry: %w", err)
	}
	return nil
}
// ListHours returns time entries for an assignment, optionally filtered by
// month ("YYYY-MM"; empty returns all), most recent first. The result is
// never nil so it serializes to [] rather than null.
func (s *Store) ListHours(ctx context.Context, assignmentID uuid.UUID, month string) ([]HourEntry, error) {
	var query string
	var args []interface{}
	if month != "" {
		query = `
			SELECT id, assignment_id, date, hours, category, description, billable, created_at
			FROM dsb_hours
			WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
			ORDER BY date DESC, created_at DESC
		`
		args = []interface{}{assignmentID, month}
	} else {
		query = `
			SELECT id, assignment_id, date, hours, category, description, billable, created_at
			FROM dsb_hours
			WHERE assignment_id = $1
			ORDER BY date DESC, created_at DESC
		`
		args = []interface{}{assignmentID}
	}
	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, fmt.Errorf("query hours: %w", err)
	}
	defer rows.Close()
	var entries []HourEntry
	for rows.Next() {
		var h HourEntry
		if err := rows.Scan(
			&h.ID, &h.AssignmentID, &h.Date, &h.Hours, &h.Category,
			&h.Description, &h.Billable, &h.CreatedAt,
		); err != nil {
			return nil, fmt.Errorf("scan hour entry: %w", err)
		}
		entries = append(entries, h)
	}
	// FIX: surface iteration errors that previously truncated results silently.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate hours: %w", err)
	}
	if entries == nil {
		entries = []HourEntry{}
	}
	return entries, nil
}
// GetHoursSummary returns aggregated hour statistics for an assignment,
// optionally restricted to a single month ("YYYY-MM"). With an empty month,
// Period is "all" and all entries are aggregated.
func (s *Store) GetHoursSummary(ctx context.Context, assignmentID uuid.UUID, month string) (*HoursSummary, error) {
	summary := &HoursSummary{
		ByCategory: make(map[string]float64),
		Period:     "all",
	}
	if month != "" {
		summary.Period = month
	}
	// Total and billable hours
	var totalQuery string
	var totalArgs []interface{}
	if month != "" {
		totalQuery = `
			SELECT COALESCE(SUM(hours), 0), COALESCE(SUM(CASE WHEN billable THEN hours ELSE 0 END), 0)
			FROM dsb_hours
			WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
		`
		totalArgs = []interface{}{assignmentID, month}
	} else {
		totalQuery = `
			SELECT COALESCE(SUM(hours), 0), COALESCE(SUM(CASE WHEN billable THEN hours ELSE 0 END), 0)
			FROM dsb_hours
			WHERE assignment_id = $1
		`
		totalArgs = []interface{}{assignmentID}
	}
	err := s.pool.QueryRow(ctx, totalQuery, totalArgs...).Scan(&summary.TotalHours, &summary.BillableHours)
	if err != nil {
		return nil, fmt.Errorf("query hours summary totals: %w", err)
	}
	// Hours by category
	var catQuery string
	var catArgs []interface{}
	if month != "" {
		catQuery = `
			SELECT category, COALESCE(SUM(hours), 0)
			FROM dsb_hours
			WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
			GROUP BY category
		`
		catArgs = []interface{}{assignmentID, month}
	} else {
		catQuery = `
			SELECT category, COALESCE(SUM(hours), 0)
			FROM dsb_hours
			WHERE assignment_id = $1
			GROUP BY category
		`
		catArgs = []interface{}{assignmentID}
	}
	rows, err := s.pool.Query(ctx, catQuery, catArgs...)
	if err != nil {
		return nil, fmt.Errorf("query hours by category: %w", err)
	}
	defer rows.Close()
	for rows.Next() {
		var cat string
		var hours float64
		if err := rows.Scan(&cat, &hours); err != nil {
			return nil, fmt.Errorf("scan category hours: %w", err)
		}
		summary.ByCategory[cat] = hours
	}
	// FIX: iteration errors were previously ignored, which could yield an
	// incomplete ByCategory map without any indication of failure.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate category hours: %w", err)
	}
	return summary, nil
}
// ============================================================================
// Tasks
// ============================================================================
// CreateTask inserts a new DSB task. As a side effect it assigns a fresh ID
// and timestamps to t, and defaults status to "open" and priority to
// "medium" when unset.
func (s *Store) CreateTask(ctx context.Context, t *Task) error {
	now := time.Now().UTC()
	t.ID = uuid.New()
	t.CreatedAt, t.UpdatedAt = now, now
	if t.Status == "" {
		t.Status = "open"
	}
	if t.Priority == "" {
		t.Priority = "medium"
	}
	if _, err := s.pool.Exec(ctx, `
		INSERT INTO dsb_tasks (id, assignment_id, title, description, category, priority, status, due_date, created_at, updated_at)
		VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
	`, t.ID, t.AssignmentID, t.Title, t.Description, t.Category, t.Priority, t.Status, t.DueDate, t.CreatedAt, t.UpdatedAt); err != nil {
		return fmt.Errorf("insert task: %w", err)
	}
	return nil
}
// ListTasks returns tasks for an assignment, optionally filtered by status
// (empty returns all). Ordering: priority (urgent > high > medium > low),
// then due date ascending (NULLs last), then newest first. The result is
// never nil so it serializes to [] rather than null.
func (s *Store) ListTasks(ctx context.Context, assignmentID uuid.UUID, status string) ([]Task, error) {
	var query string
	var args []interface{}
	if status != "" {
		query = `
			SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
			FROM dsb_tasks
			WHERE assignment_id = $1 AND status = $2
			ORDER BY CASE priority
				WHEN 'urgent' THEN 1
				WHEN 'high' THEN 2
				WHEN 'medium' THEN 3
				WHEN 'low' THEN 4
				ELSE 5
			END, due_date ASC NULLS LAST, created_at DESC
		`
		args = []interface{}{assignmentID, status}
	} else {
		query = `
			SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
			FROM dsb_tasks
			WHERE assignment_id = $1
			ORDER BY CASE priority
				WHEN 'urgent' THEN 1
				WHEN 'high' THEN 2
				WHEN 'medium' THEN 3
				WHEN 'low' THEN 4
				ELSE 5
			END, due_date ASC NULLS LAST, created_at DESC
		`
		args = []interface{}{assignmentID}
	}
	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, fmt.Errorf("query tasks: %w", err)
	}
	defer rows.Close()
	var tasks []Task
	for rows.Next() {
		var t Task
		if err := rows.Scan(
			&t.ID, &t.AssignmentID, &t.Title, &t.Description, &t.Category,
			&t.Priority, &t.Status, &t.DueDate, &t.CompletedAt,
			&t.CreatedAt, &t.UpdatedAt,
		); err != nil {
			return nil, fmt.Errorf("scan task: %w", err)
		}
		tasks = append(tasks, t)
	}
	// FIX: surface iteration errors that previously truncated results silently.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate tasks: %w", err)
	}
	if tasks == nil {
		tasks = []Task{}
	}
	return tasks, nil
}
// UpdateTask persists the mutable fields of an existing task (title,
// description, category, priority, status, due date) and refreshes
// updated_at.
func (s *Store) UpdateTask(ctx context.Context, t *Task) error {
	if _, err := s.pool.Exec(ctx, `
		UPDATE dsb_tasks
		SET title = $2, description = $3, category = $4, priority = $5, status = $6, due_date = $7, updated_at = NOW()
		WHERE id = $1
	`, t.ID, t.Title, t.Description, t.Category, t.Priority, t.Status, t.DueDate); err != nil {
		return fmt.Errorf("update task: %w", err)
	}
	return nil
}
// CompleteTask marks a task as completed, stamping completed_at and
// updated_at with the database's current time.
func (s *Store) CompleteTask(ctx context.Context, taskID uuid.UUID) error {
	if _, err := s.pool.Exec(ctx, `
		UPDATE dsb_tasks
		SET status = 'completed', completed_at = NOW(), updated_at = NOW()
		WHERE id = $1
	`, taskID); err != nil {
		return fmt.Errorf("complete task: %w", err)
	}
	return nil
}
// ============================================================================
// Communications
// ============================================================================
// CreateCommunication inserts a new communication log entry, assigning a
// fresh ID and creation timestamp to c as a side effect.
func (s *Store) CreateCommunication(ctx context.Context, c *Communication) error {
	c.ID = uuid.New()
	c.CreatedAt = time.Now().UTC()
	if _, err := s.pool.Exec(ctx, `
		INSERT INTO dsb_communications (id, assignment_id, direction, channel, subject, content, participants, created_at)
		VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
	`, c.ID, c.AssignmentID, c.Direction, c.Channel, c.Subject, c.Content, c.Participants, c.CreatedAt); err != nil {
		return fmt.Errorf("insert communication: %w", err)
	}
	return nil
}
// ListCommunications returns all communication entries for an assignment,
// newest first. The result is never nil so it serializes to [] rather than
// null.
func (s *Store) ListCommunications(ctx context.Context, assignmentID uuid.UUID) ([]Communication, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT id, assignment_id, direction, channel, subject, content, participants, created_at
		FROM dsb_communications
		WHERE assignment_id = $1
		ORDER BY created_at DESC
	`, assignmentID)
	if err != nil {
		return nil, fmt.Errorf("query communications: %w", err)
	}
	defer rows.Close()
	var comms []Communication
	for rows.Next() {
		var c Communication
		if err := rows.Scan(
			&c.ID, &c.AssignmentID, &c.Direction, &c.Channel,
			&c.Subject, &c.Content, &c.Participants, &c.CreatedAt,
		); err != nil {
			return nil, fmt.Errorf("scan communication: %w", err)
		}
		comms = append(comms, c)
	}
	// FIX: surface iteration errors that previously truncated results silently.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("iterate communications: %w", err)
	}
	if comms == nil {
		comms = []Communication{}
	}
	return comms, nil
}

View File

@@ -1,395 +0,0 @@
package funding
import (
"archive/zip"
"bytes"
"fmt"
"io"
"time"
"github.com/jung-kurt/gofpdf"
"github.com/xuri/excelize/v2"
)
// ExportService handles document generation for funding applications:
// PDF letters and concepts, XLSX budget plans, and ZIP export bundles.
// It is stateless and safe to share.
type ExportService struct{}

// NewExportService creates a new export service.
func NewExportService() *ExportService {
	return &ExportService{}
}
// GenerateApplicationLetter generates the main application letter as a PDF.
//
// The letter is assembled top-to-bottom in six numbered sections
// (Einleitung, Projektziel, Massnahme, Datenschutz, Kosten, Laufzeit).
// SchoolProfile, ProjectPlan, Budget and Timeline are each optional: a nil
// sub-struct simply leaves its section body empty. Returns the PDF bytes,
// or the error reported by the PDF writer.
func (s *ExportService) GenerateApplicationLetter(app *FundingApplication) ([]byte, error) {
	// A4 portrait, millimetre units, 25mm margins on all sides.
	pdf := gofpdf.New("P", "mm", "A4", "")
	pdf.SetMargins(25, 25, 25)
	pdf.AddPage()
	// Header
	pdf.SetFont("Helvetica", "B", 14)
	pdf.Cell(0, 10, "Antrag auf Foerderung im Rahmen der digitalen Bildungsinfrastruktur")
	pdf.Ln(15)
	// Application number and current date
	pdf.SetFont("Helvetica", "", 10)
	pdf.Cell(0, 6, fmt.Sprintf("Antragsnummer: %s", app.ApplicationNumber))
	pdf.Ln(6)
	pdf.Cell(0, 6, fmt.Sprintf("Datum: %s", time.Now().Format("02.01.2006")))
	pdf.Ln(15)
	// Section 1: Einleitung — school identity and key figures
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "1. Einleitung")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.SchoolProfile != nil {
		pdf.MultiCell(0, 6, fmt.Sprintf(
			"Die %s (Schulnummer: %s) beantragt hiermit Foerdermittel aus dem Programm %s.\n\n"+
				"Schultraeger: %s\n"+
				"Schulform: %s\n"+
				"Schueleranzahl: %d\n"+
				"Lehrkraefte: %d",
			app.SchoolProfile.Name,
			app.SchoolProfile.SchoolNumber,
			app.FundingProgram,
			app.SchoolProfile.CarrierName,
			app.SchoolProfile.Type,
			app.SchoolProfile.StudentCount,
			app.SchoolProfile.TeacherCount,
		), "", "", false)
	}
	pdf.Ln(10)
	// Section 2: Projektziel — summary followed by goals
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "2. Projektziel")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.ProjectPlan != nil {
		pdf.MultiCell(0, 6, app.ProjectPlan.Summary, "", "", false)
		pdf.Ln(5)
		pdf.MultiCell(0, 6, app.ProjectPlan.Goals, "", "", false)
	}
	pdf.Ln(10)
	// Section 3: Beschreibung der Massnahme — the didactic concept
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "3. Beschreibung der Massnahme")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.ProjectPlan != nil {
		pdf.MultiCell(0, 6, app.ProjectPlan.DidacticConcept, "", "", false)
	}
	pdf.Ln(10)
	// Section 4: Datenschutz & IT-Betrieb — only rendered when text exists
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "4. Datenschutz & IT-Betrieb")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.ProjectPlan != nil && app.ProjectPlan.DataProtection != "" {
		pdf.MultiCell(0, 6, app.ProjectPlan.DataProtection, "", "", false)
	}
	pdf.Ln(10)
	// Section 5: Kosten & Finanzierung
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "5. Kosten & Finanzierung")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.Budget != nil {
		pdf.Cell(0, 6, fmt.Sprintf("Gesamtkosten: %.2f EUR", app.Budget.TotalCost))
		pdf.Ln(6)
		// FundingRate is treated as a fraction (0..1) and rendered as a percentage.
		pdf.Cell(0, 6, fmt.Sprintf("Beantragter Foerderbetrag: %.2f EUR (%.0f%%)", app.Budget.RequestedFunding, app.Budget.FundingRate*100))
		pdf.Ln(6)
		pdf.Cell(0, 6, fmt.Sprintf("Eigenanteil: %.2f EUR", app.Budget.OwnContribution))
	}
	pdf.Ln(10)
	// Section 6: Laufzeit — planned project start and end dates
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "6. Laufzeit")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.Timeline != nil {
		pdf.Cell(0, 6, fmt.Sprintf("Projektbeginn: %s", app.Timeline.PlannedStart.Format("02.01.2006")))
		pdf.Ln(6)
		pdf.Cell(0, 6, fmt.Sprintf("Projektende: %s", app.Timeline.PlannedEnd.Format("02.01.2006")))
	}
	pdf.Ln(15)
	// Footer note identifying the generating tool
	pdf.SetFont("Helvetica", "I", 9)
	pdf.MultiCell(0, 5, "Hinweis: Dieser Antrag wurde mit dem Foerderantrag-Wizard von BreakPilot erstellt. "+
		"Die finale Pruefung und Einreichung erfolgt durch den Schultraeger.", "", "", false)
	// Render the document into memory and return the raw bytes.
	var buf bytes.Buffer
	if err := pdf.Output(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// GenerateBudgetPlan generates the budget plan as an XLSX workbook with a
// single "Kostenplan" sheet: a header row, one row per budget item, and
// summary rows (total cost, requested funding, own contribution). A nil
// app.Budget produces a sheet with only the header row. Returns the XLSX
// bytes, or the error reported by the workbook writer.
//
// NOTE(review): cell/style errors from excelize are intentionally ignored
// here (best-effort formatting); only the final Write error is surfaced.
func (s *ExportService) GenerateBudgetPlan(app *FundingApplication) ([]byte, error) {
	f := excelize.NewFile()
	sheetName := "Kostenplan"
	f.SetSheetName("Sheet1", sheetName)
	// Header row
	headers := []string{
		"Pos.", "Kategorie", "Beschreibung", "Hersteller",
		"Anzahl", "Einzelpreis", "Gesamt", "Foerderfahig", "Finanzierung",
	}
	for i, h := range headers {
		// Columns are 1-based in excelize; row 1 is the header.
		cell, _ := excelize.CoordinatesToCellName(i+1, 1)
		f.SetCellValue(sheetName, cell, h)
	}
	// Style header: bold on a light gray fill
	headerStyle, _ := f.NewStyle(&excelize.Style{
		Font: &excelize.Font{Bold: true},
		Fill: excelize.Fill{Type: "pattern", Color: []string{"#E0E0E0"}, Pattern: 1},
	})
	f.SetRowStyle(sheetName, 1, 1, headerStyle)
	// Data rows start directly below the header.
	row := 2
	if app.Budget != nil {
		for i, item := range app.Budget.BudgetItems {
			f.SetCellValue(sheetName, fmt.Sprintf("A%d", row), i+1)
			f.SetCellValue(sheetName, fmt.Sprintf("B%d", row), string(item.Category))
			f.SetCellValue(sheetName, fmt.Sprintf("C%d", row), item.Description)
			f.SetCellValue(sheetName, fmt.Sprintf("D%d", row), item.Manufacturer)
			f.SetCellValue(sheetName, fmt.Sprintf("E%d", row), item.Quantity)
			f.SetCellValue(sheetName, fmt.Sprintf("F%d", row), item.UnitPrice)
			f.SetCellValue(sheetName, fmt.Sprintf("G%d", row), item.TotalPrice)
			// Boolean rendered as German Ja/Nein for the sheet.
			fundable := "Nein"
			if item.IsFundable {
				fundable = "Ja"
			}
			f.SetCellValue(sheetName, fmt.Sprintf("H%d", row), fundable)
			f.SetCellValue(sheetName, fmt.Sprintf("I%d", row), item.FundingSource)
			row++
		}
		// Summary rows, separated from the items by one blank row.
		row += 2
		f.SetCellValue(sheetName, fmt.Sprintf("F%d", row), "Gesamtkosten:")
		f.SetCellValue(sheetName, fmt.Sprintf("G%d", row), app.Budget.TotalCost)
		row++
		f.SetCellValue(sheetName, fmt.Sprintf("F%d", row), "Foerderbetrag:")
		f.SetCellValue(sheetName, fmt.Sprintf("G%d", row), app.Budget.RequestedFunding)
		row++
		f.SetCellValue(sheetName, fmt.Sprintf("F%d", row), "Eigenanteil:")
		f.SetCellValue(sheetName, fmt.Sprintf("G%d", row), app.Budget.OwnContribution)
	}
	// Set column widths for readability
	f.SetColWidth(sheetName, "A", "A", 6)
	f.SetColWidth(sheetName, "B", "B", 15)
	f.SetColWidth(sheetName, "C", "C", 35)
	f.SetColWidth(sheetName, "D", "D", 15)
	f.SetColWidth(sheetName, "E", "E", 8)
	f.SetColWidth(sheetName, "F", "F", 12)
	f.SetColWidth(sheetName, "G", "G", 12)
	f.SetColWidth(sheetName, "H", "H", 12)
	f.SetColWidth(sheetName, "I", "I", 15)
	// Apply currency formatting to the price columns.
	currencyStyle, _ := f.NewStyle(&excelize.Style{
		NumFmt: 44, // Currency format
	})
	f.SetColStyle(sheetName, "F", currencyStyle)
	f.SetColStyle(sheetName, "G", currencyStyle)
	// Serialize the workbook into memory and return the raw bytes.
	var buf bytes.Buffer
	if err := f.Write(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// GenerateDataProtectionConcept generates the data protection and operations
// concept as a PDF with three sections (processing principles, technical and
// organisational measures, operations). Sections 1 and 3 use the project
// plan's DataProtection and MaintenancePlan text when set, and otherwise
// fall back to built-in boilerplate. Returns the PDF bytes, or the error
// reported by the PDF writer.
func (s *ExportService) GenerateDataProtectionConcept(app *FundingApplication) ([]byte, error) {
	// A4 portrait, millimetre units, 25mm margins on all sides.
	pdf := gofpdf.New("P", "mm", "A4", "")
	pdf.SetMargins(25, 25, 25)
	pdf.AddPage()
	// Header
	pdf.SetFont("Helvetica", "B", 14)
	pdf.Cell(0, 10, "Datenschutz- und Betriebskonzept")
	pdf.Ln(15)
	pdf.SetFont("Helvetica", "", 10)
	pdf.Cell(0, 6, fmt.Sprintf("Antragsnummer: %s", app.ApplicationNumber))
	pdf.Ln(6)
	if app.SchoolProfile != nil {
		pdf.Cell(0, 6, fmt.Sprintf("Schule: %s", app.SchoolProfile.Name))
	}
	pdf.Ln(15)
	// Section 1: processing principles — custom text or local-processing boilerplate
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "1. Grundsaetze der Datenverarbeitung")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.ProjectPlan != nil && app.ProjectPlan.DataProtection != "" {
		pdf.MultiCell(0, 6, app.ProjectPlan.DataProtection, "", "", false)
	} else {
		pdf.MultiCell(0, 6, "Das Projekt setzt auf eine vollstaendig lokale Datenverarbeitung:\n\n"+
			"- Alle Daten werden ausschliesslich auf den schuleigenen Systemen verarbeitet\n"+
			"- Keine Uebermittlung personenbezogener Daten an externe Dienste\n"+
			"- Keine Cloud-Speicherung sensibler Daten\n"+
			"- Betrieb im Verantwortungsbereich der Schule", "", "", false)
	}
	pdf.Ln(10)
	// Section 2: technical and organisational measures (fixed boilerplate)
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "2. Technische und organisatorische Massnahmen")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	pdf.MultiCell(0, 6, "Folgende TOMs werden umgesetzt:\n\n"+
		"- Zugriffskontrolle ueber schuleigene Benutzerverwaltung\n"+
		"- Verschluesselte Datenspeicherung\n"+
		"- Regelmaessige Sicherheitsupdates\n"+
		"- Protokollierung von Zugriffen\n"+
		"- Automatische Loeschung nach definierten Fristen", "", "", false)
	pdf.Ln(10)
	// Section 3: operations concept — custom maintenance plan or boilerplate
	pdf.SetFont("Helvetica", "B", 12)
	pdf.Cell(0, 8, "3. Betriebskonzept")
	pdf.Ln(10)
	pdf.SetFont("Helvetica", "", 10)
	if app.ProjectPlan != nil && app.ProjectPlan.MaintenancePlan != "" {
		pdf.MultiCell(0, 6, app.ProjectPlan.MaintenancePlan, "", "", false)
	} else {
		pdf.MultiCell(0, 6, "Der laufende Betrieb wird wie folgt sichergestellt:\n\n"+
			"- Schulung des technischen Personals\n"+
			"- Dokumentierte Betriebsverfahren\n"+
			"- Regelmaessige Wartung und Updates\n"+
			"- Definierte Ansprechpartner", "", "", false)
	}
	// Render the document into memory and return the raw bytes.
	var buf bytes.Buffer
	if err := pdf.Output(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// GenerateExportBundle generates a ZIP file with all documents.
//
// Document generation stays best-effort (a document that fails to render is
// simply omitted), but ZIP-level errors are now fatal: the previous version
// discarded zipWriter.Create's error (`w, _ :=`) and would have panicked on a
// nil writer, and it ignored Write errors entirely.
func (s *ExportService) GenerateExportBundle(app *FundingApplication) ([]byte, error) {
	var buf bytes.Buffer
	zipWriter := zip.NewWriter(&buf)

	// addEntry writes one file into the archive, propagating ZIP errors.
	addEntry := func(name string, content []byte) error {
		w, err := zipWriter.Create(name)
		if err != nil {
			return fmt.Errorf("create zip entry %q: %w", name, err)
		}
		if _, err := w.Write(content); err != nil {
			return fmt.Errorf("write zip entry %q: %w", name, err)
		}
		return nil
	}

	// Application letter (skipped if generation fails).
	if letter, err := s.GenerateApplicationLetter(app); err == nil {
		if err := addEntry(fmt.Sprintf("%s_Antragsschreiben.pdf", app.ApplicationNumber), letter); err != nil {
			return nil, err
		}
	}
	// Budget plan (skipped if generation fails).
	if budget, err := s.GenerateBudgetPlan(app); err == nil {
		if err := addEntry(fmt.Sprintf("%s_Kostenplan.xlsx", app.ApplicationNumber), budget); err != nil {
			return nil, err
		}
	}
	// Data protection concept (skipped if generation fails).
	if dp, err := s.GenerateDataProtectionConcept(app); err == nil {
		if err := addEntry(fmt.Sprintf("%s_Datenschutzkonzept.pdf", app.ApplicationNumber), dp); err != nil {
			return nil, err
		}
	}

	// Attachments are not yet bundled: reading them would require file-system
	// access this service does not have. Placeholder kept intentionally.
	for _, attachment := range app.Attachments {
		_ = attachment
	}

	// Close finalizes the central directory; its error matters for integrity.
	if err := zipWriter.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// GeneratedDocument represents a single generated document held in memory,
// ready to be written to a ZIP bundle or returned over HTTP.
// (The previous comment wrongly named this type "ExportDocument".)
type GeneratedDocument struct {
	Name     string // output file name, e.g. "<number>_Antragsschreiben.pdf"
	Type     string // pdf, xlsx, docx
	Content  []byte // raw document bytes
	MimeType string // MIME type for HTTP responses
}
// GenerateAllDocuments generates all documents for an application.
// Generation is best-effort: a document whose generator returns an error is
// omitted from the result rather than failing the whole call.
func (s *ExportService) GenerateAllDocuments(app *FundingApplication) ([]GeneratedDocument, error) {
	// Table of document generators, rendered in order.
	generators := []struct {
		nameFmt  string
		docType  string
		mimeType string
		render   func(*FundingApplication) ([]byte, error)
	}{
		{"%s_Antragsschreiben.pdf", "pdf", "application/pdf", s.GenerateApplicationLetter},
		{"%s_Kostenplan.xlsx", "xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", s.GenerateBudgetPlan},
		{"%s_Datenschutzkonzept.pdf", "pdf", "application/pdf", s.GenerateDataProtectionConcept},
	}

	var docs []GeneratedDocument
	for _, g := range generators {
		content, err := g.render(app)
		if err != nil {
			continue // best-effort: skip documents that fail to render
		}
		docs = append(docs, GeneratedDocument{
			Name:     fmt.Sprintf(g.nameFmt, app.ApplicationNumber),
			Type:     g.docType,
			Content:  content,
			MimeType: g.mimeType,
		})
	}
	return docs, nil
}
// WriteZipToWriter streams all generated documents as a ZIP archive to w.
//
// Fixes over the previous version: entry-creation errors are propagated
// instead of silently skipping documents with `continue`, Write errors are
// no longer ignored, and the zip writer's Close error (which finalizes the
// central directory and determines archive integrity) is returned instead of
// being swallowed by a defer.
func (s *ExportService) WriteZipToWriter(app *FundingApplication, w io.Writer) error {
	docs, err := s.GenerateAllDocuments(app)
	if err != nil {
		return err
	}
	zipWriter := zip.NewWriter(w)
	for _, doc := range docs {
		f, err := zipWriter.Create(doc.Name)
		if err != nil {
			zipWriter.Close() // best-effort cleanup; the Create error is primary
			return fmt.Errorf("create zip entry %q: %w", doc.Name, err)
		}
		if _, err := f.Write(doc.Content); err != nil {
			zipWriter.Close() // best-effort cleanup; the Write error is primary
			return fmt.Errorf("write zip entry %q: %w", doc.Name, err)
		}
	}
	return zipWriter.Close()
}

View File

@@ -1,394 +0,0 @@
package funding
import (
"time"
"github.com/google/uuid"
)
// ============================================================================
// Constants / Enums
// ============================================================================

// FundingProgram identifies which funding scheme an application targets.
type FundingProgram string

const (
	FundingProgramDigitalPakt1     FundingProgram = "DIGITALPAKT_1"
	FundingProgramDigitalPakt2     FundingProgram = "DIGITALPAKT_2"
	FundingProgramLandesfoerderung FundingProgram = "LANDESFOERDERUNG"
	FundingProgramSchultraeger     FundingProgram = "SCHULTRAEGER"
	FundingProgramSonstige         FundingProgram = "SONSTIGE"
)

// ApplicationStatus represents the workflow status of an application.
// ARCHIVED additionally serves as the soft-delete marker (see
// PostgresStore.DeleteApplication).
type ApplicationStatus string

const (
	ApplicationStatusDraft      ApplicationStatus = "DRAFT"
	ApplicationStatusInProgress ApplicationStatus = "IN_PROGRESS"
	ApplicationStatusReview     ApplicationStatus = "REVIEW"
	ApplicationStatusSubmitted  ApplicationStatus = "SUBMITTED"
	ApplicationStatusApproved   ApplicationStatus = "APPROVED"
	ApplicationStatusRejected   ApplicationStatus = "REJECTED"
	ApplicationStatusArchived   ApplicationStatus = "ARCHIVED"
)

// FederalState represents German federal states as short codes.
type FederalState string

const (
	FederalStateNI  FederalState = "NI"  // Niedersachsen
	FederalStateNRW FederalState = "NRW" // Nordrhein-Westfalen
	FederalStateBAY FederalState = "BAY" // Bayern
	FederalStateBW  FederalState = "BW"  // Baden-Wuerttemberg
	FederalStateHE  FederalState = "HE"  // Hessen
	FederalStateSN  FederalState = "SN"  // Sachsen
	FederalStateTH  FederalState = "TH"  // Thueringen
	FederalStateSA  FederalState = "SA"  // Sachsen-Anhalt
	FederalStateBB  FederalState = "BB"  // Brandenburg
	FederalStateMV  FederalState = "MV"  // Mecklenburg-Vorpommern
	FederalStateSH  FederalState = "SH"  // Schleswig-Holstein
	FederalStateHH  FederalState = "HH"  // Hamburg
	FederalStateHB  FederalState = "HB"  // Bremen
	FederalStateBE  FederalState = "BE"  // Berlin
	FederalStateSL  FederalState = "SL"  // Saarland
	FederalStateRP  FederalState = "RP"  // Rheinland-Pfalz
)

// SchoolType represents the different school types of the German school
// system (constant values keep their German names).
type SchoolType string

const (
	SchoolTypeGrundschule    SchoolType = "GRUNDSCHULE"
	SchoolTypeHauptschule    SchoolType = "HAUPTSCHULE"
	SchoolTypeRealschule     SchoolType = "REALSCHULE"
	SchoolTypeGymnasium      SchoolType = "GYMNASIUM"
	SchoolTypeGesamtschule   SchoolType = "GESAMTSCHULE"
	SchoolTypeOberschule     SchoolType = "OBERSCHULE"
	SchoolTypeFoerderschule  SchoolType = "FOERDERSCHULE"
	SchoolTypeBerufsschule   SchoolType = "BERUFSSCHULE"
	SchoolTypeBerufskolleg   SchoolType = "BERUFSKOLLEG"
	SchoolTypeFachoberschule SchoolType = "FACHOBERSCHULE"
	SchoolTypeBerufliches    SchoolType = "BERUFLICHES_GYMNASIUM"
	SchoolTypeSonstige       SchoolType = "SONSTIGE"
)

// CarrierType represents the school carrier (Schultraeger) type.
type CarrierType string

const (
	CarrierTypePublic    CarrierType = "PUBLIC"     // public-sector carrier
	CarrierTypePrivate   CarrierType = "PRIVATE"    // private carrier
	CarrierTypeChurch    CarrierType = "CHURCH"     // church-run carrier
	CarrierTypeNonProfit CarrierType = "NON_PROFIT" // non-profit carrier
)

// BudgetCategory represents categories for budget items.
type BudgetCategory string

const (
	BudgetCategoryNetwork      BudgetCategory = "NETWORK"      // network/cabling
	BudgetCategoryWLAN         BudgetCategory = "WLAN"         // WLAN infrastructure
	BudgetCategoryDevices      BudgetCategory = "DEVICES"      // end-user devices
	BudgetCategoryPresentation BudgetCategory = "PRESENTATION" // presentation technology
	BudgetCategorySoftware     BudgetCategory = "SOFTWARE"     // software licenses
	BudgetCategoryServer       BudgetCategory = "SERVER"       // server/data center
	BudgetCategoryServices     BudgetCategory = "SERVICES"     // services
	BudgetCategoryTraining     BudgetCategory = "TRAINING"     // training
	BudgetCategorySonstige     BudgetCategory = "SONSTIGE"     // other
)
// ============================================================================
// Main Entities
// ============================================================================

// FundingApplication is the aggregate root of the funding workflow: one
// application for one school, carrying wizard state, the structured
// sub-documents (school profile, project plan, budget, timeline), the
// financial summary, attachments, and audit metadata.
type FundingApplication struct {
	ID                uuid.UUID         `json:"id"`
	TenantID          uuid.UUID         `json:"tenant_id"`
	ApplicationNumber string            `json:"application_number"` // e.g., DP2-NI-2026-00123
	Title             string            `json:"title"`
	FundingProgram    FundingProgram    `json:"funding_program"`
	Status            ApplicationStatus `json:"status"`

	// Wizard state
	CurrentStep int                    `json:"current_step"`
	TotalSteps  int                    `json:"total_steps"`
	WizardData  map[string]interface{} `json:"wizard_data,omitempty"` // raw per-step form data, keyed "step_<n>"

	// School information
	SchoolProfile *SchoolProfile `json:"school_profile,omitempty"`

	// Project information
	ProjectPlan *ProjectPlan     `json:"project_plan,omitempty"`
	Budget      *Budget          `json:"budget,omitempty"`
	Timeline    *ProjectTimeline `json:"timeline,omitempty"`

	// Financial summary
	RequestedAmount float64  `json:"requested_amount"`
	OwnContribution float64  `json:"own_contribution"`
	ApprovedAmount  *float64 `json:"approved_amount,omitempty"` // nil until a decision is recorded

	// Attachments
	Attachments []Attachment `json:"attachments,omitempty"`

	// Audit trail
	CreatedAt   time.Time  `json:"created_at"`
	UpdatedAt   time.Time  `json:"updated_at"`
	SubmittedAt *time.Time `json:"submitted_at,omitempty"`
	CreatedBy   uuid.UUID  `json:"created_by"`
	UpdatedBy   uuid.UUID  `json:"updated_by"`
}

// SchoolProfile contains the school master data entered for an application.
type SchoolProfile struct {
	Name           string                `json:"name"`
	SchoolNumber   string                `json:"school_number"` // official school number
	Type           SchoolType            `json:"type"`
	FederalState   FederalState          `json:"federal_state"`
	Address        Address               `json:"address"`
	ContactPerson  ContactPerson         `json:"contact_person"`
	StudentCount   int                   `json:"student_count"`
	TeacherCount   int                   `json:"teacher_count"`
	ClassCount     int                   `json:"class_count"`
	CarrierType    CarrierType           `json:"carrier_type"`
	CarrierName    string                `json:"carrier_name"`
	CarrierAddress *Address              `json:"carrier_address,omitempty"`
	Infrastructure *InfrastructureStatus `json:"infrastructure,omitempty"`
}

// Address represents a postal address.
type Address struct {
	Street     string `json:"street"`
	HouseNo    string `json:"house_no"`
	PostalCode string `json:"postal_code"`
	City       string `json:"city"`
	Country    string `json:"country,omitempty"`
}

// ContactPerson represents a contact person at the school.
type ContactPerson struct {
	Salutation string `json:"salutation,omitempty"` // e.g. "Herr"/"Frau" (Mr/Mrs)
	Title      string `json:"title,omitempty"`      // academic title, e.g. Dr., Prof.
	FirstName  string `json:"first_name"`
	LastName   string `json:"last_name"`
	Position   string `json:"position,omitempty"` // e.g. school principal, IT officer
	Email      string `json:"email"`
	Phone      string `json:"phone,omitempty"`
}

// InfrastructureStatus describes the school's current IT infrastructure.
type InfrastructureStatus struct {
	HasWLAN              bool   `json:"has_wlan"`
	WLANCoverage         int    `json:"wlan_coverage"` // percentage 0-100
	HasStructuredCabling bool   `json:"has_structured_cabling"`
	InternetBandwidth    string `json:"internet_bandwidth"` // e.g., "100 Mbit/s"
	DeviceCount          int    `json:"device_count"` // currently available devices
	HasServerRoom        bool   `json:"has_server_room"`
	Notes                string `json:"notes,omitempty"`
}

// ProjectPlan describes the funded project itself.
type ProjectPlan struct {
	ProjectName      string   `json:"project_name"`
	Summary          string   `json:"summary"`          // short description (Kurzbeschreibung)
	Goals            string   `json:"goals"`            // project goals (Projektziele)
	DidacticConcept  string   `json:"didactic_concept"` // pedagogical concept
	MEPReference     string   `json:"mep_reference,omitempty"` // reference to the media development plan (Medienentwicklungsplan)
	DataProtection   string   `json:"data_protection"`  // data protection concept
	MaintenancePlan  string   `json:"maintenance_plan"` // maintenance/operations concept
	TargetGroups     []string `json:"target_groups"`    // e.g., ["Schueler", "Lehrer"]
	SubjectsAffected []string `json:"subjects_affected,omitempty"` // affected school subjects
}

// Budget represents the financial plan of an application.
type Budget struct {
	TotalCost        float64      `json:"total_cost"`
	RequestedFunding float64      `json:"requested_funding"`
	OwnContribution  float64      `json:"own_contribution"`
	OtherFunding     float64      `json:"other_funding"`
	FundingRate      float64      `json:"funding_rate"` // fraction, e.g. 0.90 = 90%
	BudgetItems      []BudgetItem `json:"budget_items"`
	IsWithinLimits   bool         `json:"is_within_limits"`
	Justification    string       `json:"justification,omitempty"` // justification text (Begruendung)
}

// BudgetItem represents a single budget line item.
type BudgetItem struct {
	ID            uuid.UUID      `json:"id"`
	Position      int            `json:"position"` // order number within the budget
	Category      BudgetCategory `json:"category"`
	Description   string         `json:"description"`
	Manufacturer  string         `json:"manufacturer,omitempty"`
	ProductName   string         `json:"product_name,omitempty"`
	Quantity      int            `json:"quantity"`
	UnitPrice     float64        `json:"unit_price"`
	TotalPrice    float64        `json:"total_price"`
	IsFundable    bool           `json:"is_fundable"`    // whether the item is eligible for funding
	FundingSource string         `json:"funding_source"` // digitalpakt, eigenanteil, sonstige
	Notes         string         `json:"notes,omitempty"`
}

// ProjectTimeline represents the project schedule.
type ProjectTimeline struct {
	PlannedStart time.Time   `json:"planned_start"`
	PlannedEnd   time.Time   `json:"planned_end"`
	Milestones   []Milestone `json:"milestones,omitempty"`
	ProjectPhase string      `json:"project_phase,omitempty"` // current phase
}

// Milestone represents a single project milestone.
type Milestone struct {
	ID          uuid.UUID  `json:"id"`
	Title       string     `json:"title"`
	Description string     `json:"description,omitempty"`
	DueDate     time.Time  `json:"due_date"`
	CompletedAt *time.Time `json:"completed_at,omitempty"`
	Status      string     `json:"status"` // planned, in_progress, completed
}

// Attachment represents an uploaded file belonging to an application.
type Attachment struct {
	ID          uuid.UUID `json:"id"`
	FileName    string    `json:"file_name"`
	FileType    string    `json:"file_type"` // pdf, docx, xlsx, jpg, png
	FileSize    int64     `json:"file_size"` // size in bytes
	Category    string    `json:"category"`  // angebot, mep, nachweis, sonstiges
	Description string    `json:"description,omitempty"`
	StoragePath string    `json:"-"` // internal path, not exposed via JSON
	UploadedAt  time.Time `json:"uploaded_at"`
	UploadedBy  uuid.UUID `json:"uploaded_by"`
}
// ============================================================================
// Wizard Step Data
// ============================================================================

// WizardStep describes a single step of the application wizard.
type WizardStep struct {
	Number      int      `json:"number"`
	Title       string   `json:"title"`
	Description string   `json:"description"`
	Fields      []string `json:"fields"` // field IDs shown in this step
	IsCompleted bool     `json:"is_completed"`
	IsRequired  bool     `json:"is_required"`
	HelpContext string   `json:"help_context"` // context passed to the LLM assistant
}

// WizardProgress tracks overall wizard completion for one application.
type WizardProgress struct {
	CurrentStep    int                    `json:"current_step"`
	TotalSteps     int                    `json:"total_steps"`
	CompletedSteps []int                  `json:"completed_steps"`
	StepValidation map[int][]string       `json:"step_validation,omitempty"` // validation errors per step
	FormData       map[string]interface{} `json:"form_data"`
	LastSavedAt    time.Time              `json:"last_saved_at"`
}
// ============================================================================
// BreakPilot Presets
// ============================================================================

// ProductPreset is a BreakPilot product preset: prefilled budget items,
// auto-fill form values, and data-protection text for a known product bundle.
type ProductPreset struct {
	ID             string                 `json:"id"`
	Name           string                 `json:"name"`
	Description    string                 `json:"description"`
	BudgetItems    []BudgetItem           `json:"budget_items"`
	AutoFill       map[string]interface{} `json:"auto_fill"` // form-field values applied when the preset is chosen
	DataProtection string                 `json:"data_protection"`
}
// ============================================================================
// Export Structures
// ============================================================================

// ExportDocument describes one generated document within an export bundle.
type ExportDocument struct {
	Type        string    `json:"type"`   // antragsschreiben, kostenplan, datenschutz
	Format      string    `json:"format"` // pdf, docx, xlsx
	FileName    string    `json:"file_name"`
	GeneratedAt time.Time `json:"generated_at"`
	ContentHash string    `json:"content_hash"`
	StoragePath string    `json:"-"` // internal path, not exposed via JSON
}

// ExportBundle represents a ZIP bundle of all documents for one application.
type ExportBundle struct {
	ID            uuid.UUID        `json:"id"`
	ApplicationID uuid.UUID        `json:"application_id"`
	Documents     []ExportDocument `json:"documents"`
	GeneratedAt   time.Time        `json:"generated_at"`
	DownloadURL   string           `json:"download_url"`
	ExpiresAt     time.Time        `json:"expires_at"` // presumably when DownloadURL stops working — confirm with serving handler
}
// ============================================================================
// LLM Assistant
// ============================================================================

// AssistantMessage represents one chat message exchanged with the assistant.
type AssistantMessage struct {
	Role    string `json:"role"` // user, assistant, system
	Content string `json:"content"`
	Step    int    `json:"step,omitempty"` // wizard step the message refers to
}

// AssistantRequest asks the assistant a question in the context of an
// application and the wizard step the user is currently on.
type AssistantRequest struct {
	ApplicationID uuid.UUID              `json:"application_id"`
	Question      string                 `json:"question"`
	CurrentStep   int                    `json:"current_step"`
	Context       map[string]interface{} `json:"context,omitempty"`
	History       []AssistantMessage     `json:"history,omitempty"` // prior conversation turns
}

// AssistantResponse is the assistant's answer, optionally with follow-up
// suggestions, help references, and proposed form values.
type AssistantResponse struct {
	Answer      string                 `json:"answer"`
	Suggestions []string               `json:"suggestions,omitempty"`
	References  []string               `json:"references,omitempty"` // links to help resources
	FormFills   map[string]interface{} `json:"form_fills,omitempty"` // suggested form values
}
// ============================================================================
// API Request/Response Types
// ============================================================================

// CreateApplicationRequest is the payload for creating a new application.
type CreateApplicationRequest struct {
	Title          string         `json:"title"`
	FundingProgram FundingProgram `json:"funding_program"`
	FederalState   FederalState   `json:"federal_state"`
	PresetID       string         `json:"preset_id,omitempty"` // optional BreakPilot preset
}

// UpdateApplicationRequest is the payload for updating an application.
// Optional scalar fields are pointers so "absent" and "zero" can be told apart.
type UpdateApplicationRequest struct {
	Title       *string                `json:"title,omitempty"`
	WizardData  map[string]interface{} `json:"wizard_data,omitempty"`
	CurrentStep *int                   `json:"current_step,omitempty"`
}

// SaveWizardStepRequest is the payload for saving one wizard step.
type SaveWizardStepRequest struct {
	Step     int                    `json:"step"`
	Data     map[string]interface{} `json:"data"`
	Complete bool                   `json:"complete"` // mark the step as complete
}

// ApplicationListResponse is the paginated response of list endpoints.
type ApplicationListResponse struct {
	Applications []FundingApplication `json:"applications"`
	Total        int                  `json:"total"` // total matching rows across all pages
	Page         int                  `json:"page"`
	PageSize     int                  `json:"page_size"`
}

// ExportRequest selects what an export endpoint should produce.
type ExportRequest struct {
	Format    string   `json:"format"`    // zip, pdf, docx
	Documents []string `json:"documents"` // which documents to include
	Language  string   `json:"language"`  // de, en
}

View File

@@ -1,652 +0,0 @@
package funding
import (
"context"
"encoding/json"
"errors"
"fmt"
"time"
"github.com/google/uuid"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgxpool"
)
// PostgresStore implements Store using PostgreSQL via a pgx connection pool.
type PostgresStore struct {
	pool *pgxpool.Pool // shared connection pool; lifecycle owned by the caller
}

// NewPostgresStore creates a new PostgreSQL store backed by the given pool.
func NewPostgresStore(pool *pgxpool.Pool) *PostgresStore {
	return &PostgresStore{pool: pool}
}
// CreateApplication creates a new funding application.
// It assigns a fresh ID, creation/update timestamps, the default 8-step
// wizard, and a generated application number before inserting the row.
func (s *PostgresStore) CreateApplication(ctx context.Context, app *FundingApplication) error {
	app.ID = uuid.New()
	app.CreatedAt = time.Now()
	app.UpdatedAt = time.Now()
	app.TotalSteps = 8 // default 8-step wizard
	app.ApplicationNumber = s.generateApplicationNumber(app.FundingProgram, app.SchoolProfile)

	// marshalField serializes one JSON column, labelling errors by field name.
	marshalField := func(v interface{}, label string) ([]byte, error) {
		b, err := json.Marshal(v)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal %s: %w", label, err)
		}
		return b, nil
	}

	wizardData, err := marshalField(app.WizardData, "wizard data")
	if err != nil {
		return err
	}
	schoolProfile, err := marshalField(app.SchoolProfile, "school profile")
	if err != nil {
		return err
	}
	projectPlan, err := marshalField(app.ProjectPlan, "project plan")
	if err != nil {
		return err
	}
	budget, err := marshalField(app.Budget, "budget")
	if err != nil {
		return err
	}
	timeline, err := marshalField(app.Timeline, "timeline")
	if err != nil {
		return err
	}

	const query = `
		INSERT INTO funding_applications (
			id, tenant_id, application_number, title, funding_program, status,
			current_step, total_steps, wizard_data,
			school_profile, project_plan, budget, timeline,
			requested_amount, own_contribution,
			created_at, updated_at, created_by, updated_by
		) VALUES (
			$1, $2, $3, $4, $5, $6,
			$7, $8, $9,
			$10, $11, $12, $13,
			$14, $15,
			$16, $17, $18, $19
		)
	`
	if _, err := s.pool.Exec(ctx, query,
		app.ID, app.TenantID, app.ApplicationNumber, app.Title, app.FundingProgram, app.Status,
		app.CurrentStep, app.TotalSteps, wizardData,
		schoolProfile, projectPlan, budget, timeline,
		app.RequestedAmount, app.OwnContribution,
		app.CreatedAt, app.UpdatedAt, app.CreatedBy, app.UpdatedBy,
	); err != nil {
		return fmt.Errorf("failed to create application: %w", err)
	}
	return nil
}
// GetApplication retrieves an application by ID.
//
// The JSON columns (wizard data, school profile, project plan, budget,
// timeline) are decoded into their typed fields only when non-empty;
// attachments are loaded in a second query and attached best-effort (a
// failed attachment load leaves Attachments nil rather than failing the call).
func (s *PostgresStore) GetApplication(ctx context.Context, id uuid.UUID) (*FundingApplication, error) {
	query := `
		SELECT
			id, tenant_id, application_number, title, funding_program, status,
			current_step, total_steps, wizard_data,
			school_profile, project_plan, budget, timeline,
			requested_amount, own_contribution, approved_amount,
			created_at, updated_at, submitted_at, created_by, updated_by
		FROM funding_applications
		WHERE id = $1
	`
	var app FundingApplication
	// Raw bytes for the JSON columns; decoded below only when non-empty.
	var wizardDataJSON, schoolProfileJSON, projectPlanJSON, budgetJSON, timelineJSON []byte
	err := s.pool.QueryRow(ctx, query, id).Scan(
		&app.ID, &app.TenantID, &app.ApplicationNumber, &app.Title, &app.FundingProgram, &app.Status,
		&app.CurrentStep, &app.TotalSteps, &wizardDataJSON,
		&schoolProfileJSON, &projectPlanJSON, &budgetJSON, &timelineJSON,
		&app.RequestedAmount, &app.OwnContribution, &app.ApprovedAmount,
		&app.CreatedAt, &app.UpdatedAt, &app.SubmittedAt, &app.CreatedBy, &app.UpdatedBy,
	)
	if err != nil {
		// Distinguish "no such row" from genuine query failures.
		if errors.Is(err, pgx.ErrNoRows) {
			return nil, fmt.Errorf("application not found: %s", id)
		}
		return nil, fmt.Errorf("failed to get application: %w", err)
	}
	// Unmarshal JSON fields; pointer fields stay nil when the column is empty.
	if len(wizardDataJSON) > 0 {
		if err := json.Unmarshal(wizardDataJSON, &app.WizardData); err != nil {
			return nil, fmt.Errorf("failed to unmarshal wizard data: %w", err)
		}
	}
	if len(schoolProfileJSON) > 0 {
		app.SchoolProfile = &SchoolProfile{}
		if err := json.Unmarshal(schoolProfileJSON, app.SchoolProfile); err != nil {
			return nil, fmt.Errorf("failed to unmarshal school profile: %w", err)
		}
	}
	if len(projectPlanJSON) > 0 {
		app.ProjectPlan = &ProjectPlan{}
		if err := json.Unmarshal(projectPlanJSON, app.ProjectPlan); err != nil {
			return nil, fmt.Errorf("failed to unmarshal project plan: %w", err)
		}
	}
	if len(budgetJSON) > 0 {
		app.Budget = &Budget{}
		if err := json.Unmarshal(budgetJSON, app.Budget); err != nil {
			return nil, fmt.Errorf("failed to unmarshal budget: %w", err)
		}
	}
	if len(timelineJSON) > 0 {
		app.Timeline = &ProjectTimeline{}
		if err := json.Unmarshal(timelineJSON, app.Timeline); err != nil {
			return nil, fmt.Errorf("failed to unmarshal timeline: %w", err)
		}
	}
	// Load attachments best-effort; errors are deliberately swallowed here.
	attachments, err := s.GetAttachments(ctx, id)
	if err == nil {
		app.Attachments = attachments
	}
	return &app, nil
}
// GetApplicationByNumber retrieves an application by its application number.
// It resolves the number to an ID first, then delegates to GetApplication so
// all JSON columns and attachments are loaded consistently.
func (s *PostgresStore) GetApplicationByNumber(ctx context.Context, number string) (*FundingApplication, error) {
	const query = `SELECT id FROM funding_applications WHERE application_number = $1`
	var id uuid.UUID
	switch err := s.pool.QueryRow(ctx, query, number).Scan(&id); {
	case errors.Is(err, pgx.ErrNoRows):
		return nil, fmt.Errorf("application not found: %s", number)
	case err != nil:
		return nil, fmt.Errorf("failed to find application by number: %w", err)
	}
	return s.GetApplication(ctx, id)
}
// UpdateApplication updates an existing application.
//
// Fix: JSON marshal errors are now surfaced instead of being silently
// discarded with `_`; a failing marshal previously wrote empty/invalid JSON
// columns without any indication to the caller.
func (s *PostgresStore) UpdateApplication(ctx context.Context, app *FundingApplication) error {
	app.UpdatedAt = time.Now()

	wizardDataJSON, err := json.Marshal(app.WizardData)
	if err != nil {
		return fmt.Errorf("failed to marshal wizard data: %w", err)
	}
	schoolProfileJSON, err := json.Marshal(app.SchoolProfile)
	if err != nil {
		return fmt.Errorf("failed to marshal school profile: %w", err)
	}
	projectPlanJSON, err := json.Marshal(app.ProjectPlan)
	if err != nil {
		return fmt.Errorf("failed to marshal project plan: %w", err)
	}
	budgetJSON, err := json.Marshal(app.Budget)
	if err != nil {
		return fmt.Errorf("failed to marshal budget: %w", err)
	}
	timelineJSON, err := json.Marshal(app.Timeline)
	if err != nil {
		return fmt.Errorf("failed to marshal timeline: %w", err)
	}

	query := `
		UPDATE funding_applications SET
			title = $2, funding_program = $3, status = $4,
			current_step = $5, wizard_data = $6,
			school_profile = $7, project_plan = $8, budget = $9, timeline = $10,
			requested_amount = $11, own_contribution = $12, approved_amount = $13,
			updated_at = $14, submitted_at = $15, updated_by = $16
		WHERE id = $1
	`
	result, err := s.pool.Exec(ctx, query,
		app.ID, app.Title, app.FundingProgram, app.Status,
		app.CurrentStep, wizardDataJSON,
		schoolProfileJSON, projectPlanJSON, budgetJSON, timelineJSON,
		app.RequestedAmount, app.OwnContribution, app.ApprovedAmount,
		app.UpdatedAt, app.SubmittedAt, app.UpdatedBy,
	)
	if err != nil {
		return fmt.Errorf("failed to update application: %w", err)
	}
	// RowsAffected == 0 means the WHERE clause matched nothing.
	if result.RowsAffected() == 0 {
		return fmt.Errorf("application not found: %s", app.ID)
	}
	return nil
}
// DeleteApplication soft-deletes an application by setting its status to
// ARCHIVED; the row itself is retained for audit purposes.
func (s *PostgresStore) DeleteApplication(ctx context.Context, id uuid.UUID) error {
	const query = `UPDATE funding_applications SET status = 'ARCHIVED', updated_at = $2 WHERE id = $1`
	result, err := s.pool.Exec(ctx, query, id, time.Now())
	switch {
	case err != nil:
		return fmt.Errorf("failed to delete application: %w", err)
	case result.RowsAffected() == 0:
		return fmt.Errorf("application not found: %s", id)
	}
	return nil
}
// ListApplications returns a paginated list of non-archived applications for
// a tenant, optionally filtered by status and funding program.
//
// Fixes over the previous version:
//   - the ORDER BY column is validated against a whitelist; filter.SortBy was
//     previously interpolated into the SQL string unchecked (SQL injection);
//   - the COUNT query now applies the same status/program filters as the
//     listing, so Total matches the filtered result set;
//   - errors from the count query, JSON decoding, and rows.Err() are no
//     longer ignored.
func (s *PostgresStore) ListApplications(ctx context.Context, tenantID uuid.UUID, filter ApplicationFilter) (*ApplicationListResponse, error) {
	// Shared WHERE clause for both the count and the listing query.
	where := ` WHERE tenant_id = $1 AND status != 'ARCHIVED'`
	args := []interface{}{tenantID}
	argIndex := 2
	if filter.Status != nil {
		where += fmt.Sprintf(" AND status = $%d", argIndex)
		args = append(args, *filter.Status)
		argIndex++
	}
	if filter.FundingProgram != nil {
		where += fmt.Sprintf(" AND funding_program = $%d", argIndex)
		args = append(args, *filter.FundingProgram)
		argIndex++
	}

	// Count with the same filters so pagination totals are consistent.
	var total int
	countQuery := `SELECT COUNT(*) FROM funding_applications` + where
	if err := s.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
		return nil, fmt.Errorf("failed to count applications: %w", err)
	}

	// Whitelist the sort column: identifiers cannot be bound as parameters,
	// so anything outside this set falls back to created_at.
	sortBy := "created_at"
	switch filter.SortBy {
	case "created_at", "updated_at", "submitted_at", "title", "status",
		"application_number", "requested_amount":
		sortBy = filter.SortBy
	}
	sortOrder := "DESC"
	if filter.SortOrder == "asc" {
		sortOrder = "ASC"
	}

	if filter.PageSize <= 0 {
		filter.PageSize = 20
	}
	if filter.Page <= 0 {
		filter.Page = 1
	}
	offset := (filter.Page - 1) * filter.PageSize

	query := `
		SELECT
			id, tenant_id, application_number, title, funding_program, status,
			current_step, total_steps, wizard_data,
			school_profile, project_plan, budget, timeline,
			requested_amount, own_contribution, approved_amount,
			created_at, updated_at, submitted_at, created_by, updated_by
		FROM funding_applications` + where +
		fmt.Sprintf(" ORDER BY %s %s LIMIT %d OFFSET %d", sortBy, sortOrder, filter.PageSize, offset)

	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, fmt.Errorf("failed to list applications: %w", err)
	}
	defer rows.Close()

	var apps []FundingApplication
	for rows.Next() {
		var app FundingApplication
		var wizardDataJSON, schoolProfileJSON, projectPlanJSON, budgetJSON, timelineJSON []byte
		err := rows.Scan(
			&app.ID, &app.TenantID, &app.ApplicationNumber, &app.Title, &app.FundingProgram, &app.Status,
			&app.CurrentStep, &app.TotalSteps, &wizardDataJSON,
			&schoolProfileJSON, &projectPlanJSON, &budgetJSON, &timelineJSON,
			&app.RequestedAmount, &app.OwnContribution, &app.ApprovedAmount,
			&app.CreatedAt, &app.UpdatedAt, &app.SubmittedAt, &app.CreatedBy, &app.UpdatedBy,
		)
		if err != nil {
			return nil, fmt.Errorf("failed to scan application: %w", err)
		}
		// Only the school profile is decoded for list views; the remaining
		// JSON columns are loaded on demand via GetApplication.
		if len(schoolProfileJSON) > 0 {
			app.SchoolProfile = &SchoolProfile{}
			if err := json.Unmarshal(schoolProfileJSON, app.SchoolProfile); err != nil {
				return nil, fmt.Errorf("failed to unmarshal school profile: %w", err)
			}
		}
		apps = append(apps, app)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate applications: %w", err)
	}

	return &ApplicationListResponse{
		Applications: apps,
		Total:        total,
		Page:         filter.Page,
		PageSize:     filter.PageSize,
	}, nil
}
// SearchApplications searches applications by free text across title,
// application number, and the school-profile JSON (case-insensitive
// substring match). Returns at most 50 results, most recently updated first.
//
// Fixes: iteration errors (rows.Err) are now reported instead of silently
// truncating results, and full-application hydration happens after the ID
// cursor has been fully consumed rather than while it is open.
func (s *PostgresStore) SearchApplications(ctx context.Context, tenantID uuid.UUID, query string) ([]FundingApplication, error) {
	searchQuery := `
		SELECT id FROM funding_applications
		WHERE tenant_id = $1
		AND status != 'ARCHIVED'
		AND (
			title ILIKE $2
			OR application_number ILIKE $2
			OR school_profile::text ILIKE $2
		)
		ORDER BY updated_at DESC
		LIMIT 50
	`
	rows, err := s.pool.Query(ctx, searchQuery, tenantID, "%"+query+"%")
	if err != nil {
		return nil, fmt.Errorf("failed to search applications: %w", err)
	}
	defer rows.Close()

	// First pass: collect matching IDs.
	var ids []uuid.UUID
	for rows.Next() {
		var id uuid.UUID
		if err := rows.Scan(&id); err != nil {
			continue // best-effort: skip rows that fail to scan
		}
		ids = append(ids, id)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate search results: %w", err)
	}

	// Second pass: hydrate each application (best-effort, as before).
	var apps []FundingApplication
	for _, id := range ids {
		if app, err := s.GetApplication(ctx, id); err == nil {
			apps = append(apps, *app)
		}
	}
	return apps, nil
}
// SaveWizardStep saves the form data for one wizard step.
// The data is stored under the key "step_<n>" in the application's wizard
// data, the current step is advanced, and the whole application is persisted.
func (s *PostgresStore) SaveWizardStep(ctx context.Context, appID uuid.UUID, step int, data map[string]interface{}) error {
	app, err := s.GetApplication(ctx, appID)
	if err != nil {
		return err
	}
	// Lazily initialize the wizard data map on first save.
	if app.WizardData == nil {
		app.WizardData = map[string]interface{}{}
	}
	app.WizardData[fmt.Sprintf("step_%d", step)] = data
	app.CurrentStep = step
	return s.UpdateApplication(ctx, app)
}
// GetWizardProgress returns the wizard progress for an application.
// A step counts as completed when a "step_<n>" entry exists in the stored
// wizard data.
func (s *PostgresStore) GetWizardProgress(ctx context.Context, appID uuid.UUID) (*WizardProgress, error) {
	app, err := s.GetApplication(ctx, appID)
	if err != nil {
		return nil, err
	}
	completed := []int{}
	for step := 1; step <= app.TotalSteps; step++ {
		if _, done := app.WizardData[fmt.Sprintf("step_%d", step)]; done {
			completed = append(completed, step)
		}
	}
	return &WizardProgress{
		CurrentStep:    app.CurrentStep,
		TotalSteps:     app.TotalSteps,
		CompletedSteps: completed,
		FormData:       app.WizardData,
		LastSavedAt:    app.UpdatedAt,
	}, nil
}
// AddAttachment adds an attachment record to an application.
// The attachment's ID and UploadedAt are assigned here.
// NOTE(review): only the metadata row is written; the file itself is
// presumably stored at attachment.StoragePath by the caller — confirm.
func (s *PostgresStore) AddAttachment(ctx context.Context, appID uuid.UUID, attachment *Attachment) error {
	attachment.ID = uuid.New()
	attachment.UploadedAt = time.Now()
	query := `
		INSERT INTO funding_attachments (
			id, application_id, file_name, file_type, file_size,
			category, description, storage_path, uploaded_at, uploaded_by
		) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
	`
	_, err := s.pool.Exec(ctx, query,
		attachment.ID, appID, attachment.FileName, attachment.FileType, attachment.FileSize,
		attachment.Category, attachment.Description, attachment.StoragePath,
		attachment.UploadedAt, attachment.UploadedBy,
	)
	return err
}
// GetAttachments returns all attachments for an application, newest first.
//
// Fix: rows.Err() is now checked after iteration, so a mid-stream failure is
// reported instead of being returned as a silently truncated list. Per-row
// scan failures remain best-effort (the row is skipped), as before.
func (s *PostgresStore) GetAttachments(ctx context.Context, appID uuid.UUID) ([]Attachment, error) {
	query := `
		SELECT id, file_name, file_type, file_size, category, description, storage_path, uploaded_at, uploaded_by
		FROM funding_attachments
		WHERE application_id = $1
		ORDER BY uploaded_at DESC
	`
	rows, err := s.pool.Query(ctx, query, appID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var attachments []Attachment
	for rows.Next() {
		var a Attachment
		err := rows.Scan(&a.ID, &a.FileName, &a.FileType, &a.FileSize, &a.Category, &a.Description, &a.StoragePath, &a.UploadedAt, &a.UploadedBy)
		if err != nil {
			continue // best-effort: skip malformed rows, keep the rest
		}
		attachments = append(attachments, a)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate attachments: %w", err)
	}
	return attachments, nil
}
// DeleteAttachment deletes an attachment record by its ID.
// Deleting a non-existent attachment is not an error (0 rows affected).
func (s *PostgresStore) DeleteAttachment(ctx context.Context, attachmentID uuid.UUID) error {
	const query = `DELETE FROM funding_attachments WHERE id = $1`
	if _, err := s.pool.Exec(ctx, query, attachmentID); err != nil {
		return err
	}
	return nil
}
// AddHistoryEntry adds an audit trail entry for an application.
// The entry's ID and PerformedAt (RFC 3339 string) are assigned here.
//
// Fix: JSON marshal errors are now surfaced instead of being discarded with
// `_`; a failing marshal previously wrote an audit record with empty payloads.
func (s *PostgresStore) AddHistoryEntry(ctx context.Context, entry *ApplicationHistoryEntry) error {
	entry.ID = uuid.New()
	entry.PerformedAt = time.Now().Format(time.RFC3339)

	oldValuesJSON, err := json.Marshal(entry.OldValues)
	if err != nil {
		return fmt.Errorf("failed to marshal old values: %w", err)
	}
	newValuesJSON, err := json.Marshal(entry.NewValues)
	if err != nil {
		return fmt.Errorf("failed to marshal new values: %w", err)
	}
	changedFieldsJSON, err := json.Marshal(entry.ChangedFields)
	if err != nil {
		return fmt.Errorf("failed to marshal changed fields: %w", err)
	}

	query := `
		INSERT INTO funding_application_history (
			id, application_id, action, changed_fields, old_values, new_values,
			performed_by, performed_at, notes
		) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
	`
	_, err = s.pool.Exec(ctx, query,
		entry.ID, entry.ApplicationID, entry.Action, changedFieldsJSON, oldValuesJSON, newValuesJSON,
		entry.PerformedBy, entry.PerformedAt, entry.Notes,
	)
	return err
}
// GetHistory returns the audit trail for an application, most recent first.
//
// Fix: rows that failed to scan were previously skipped with `continue`, and
// the three json.Unmarshal results were ignored — both could silently hide
// corrupt audit data. Scan failures, decode failures, and iterator errors
// (rows.Err) now abort the call. Empty/NULL JSON columns are tolerated and
// leave the corresponding fields at their zero values (preserving the old
// behavior for rows written without those payloads).
func (s *PostgresStore) GetHistory(ctx context.Context, appID uuid.UUID) ([]ApplicationHistoryEntry, error) {
	query := `
		SELECT id, application_id, action, changed_fields, old_values, new_values, performed_by, performed_at, notes
		FROM funding_application_history
		WHERE application_id = $1
		ORDER BY performed_at DESC
	`
	rows, err := s.pool.Query(ctx, query, appID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var history []ApplicationHistoryEntry
	for rows.Next() {
		var entry ApplicationHistoryEntry
		var changedFieldsJSON, oldValuesJSON, newValuesJSON []byte
		if err := rows.Scan(
			&entry.ID, &entry.ApplicationID, &entry.Action, &changedFieldsJSON, &oldValuesJSON, &newValuesJSON,
			&entry.PerformedBy, &entry.PerformedAt, &entry.Notes,
		); err != nil {
			return nil, err
		}
		if len(changedFieldsJSON) > 0 {
			if err := json.Unmarshal(changedFieldsJSON, &entry.ChangedFields); err != nil {
				return nil, fmt.Errorf("decoding changed fields for entry %s: %w", entry.ID, err)
			}
		}
		if len(oldValuesJSON) > 0 {
			if err := json.Unmarshal(oldValuesJSON, &entry.OldValues); err != nil {
				return nil, fmt.Errorf("decoding old values for entry %s: %w", entry.ID, err)
			}
		}
		if len(newValuesJSON) > 0 {
			if err := json.Unmarshal(newValuesJSON, &entry.NewValues); err != nil {
				return nil, fmt.Errorf("decoding new values for entry %s: %w", entry.ID, err)
			}
		}
		history = append(history, entry)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return history, nil
}
// GetStatistics returns aggregate funding statistics for a tenant.
//
// Totals, per-status counts, and requested/approved sums come from a single
// aggregate query over all non-archived applications; the per-program
// breakdown is a second grouped query.
//
// Fix: the per-program Query error, Scan errors, and rows.Err were all
// previously discarded, which silently returned an empty ByProgram map on
// failure; they are now propagated.
//
// NOTE(review): ByState is initialized but never populated here, and
// AverageProcessDays is never computed — presumably filled elsewhere or
// still missing; confirm against the dashboard requirements.
func (s *PostgresStore) GetStatistics(ctx context.Context, tenantID uuid.UUID) (*FundingStatistics, error) {
	stats := &FundingStatistics{
		ByProgram: make(map[FundingProgram]int),
		ByState:   make(map[FederalState]int),
	}
	// Total and by status
	query := `
		SELECT
			COUNT(*) as total,
			COUNT(*) FILTER (WHERE status = 'DRAFT') as draft,
			COUNT(*) FILTER (WHERE status = 'SUBMITTED') as submitted,
			COUNT(*) FILTER (WHERE status = 'APPROVED') as approved,
			COUNT(*) FILTER (WHERE status = 'REJECTED') as rejected,
			COALESCE(SUM(requested_amount), 0) as total_requested,
			COALESCE(SUM(COALESCE(approved_amount, 0)), 0) as total_approved
		FROM funding_applications
		WHERE tenant_id = $1 AND status != 'ARCHIVED'
	`
	err := s.pool.QueryRow(ctx, query, tenantID).Scan(
		&stats.TotalApplications, &stats.DraftCount, &stats.SubmittedCount,
		&stats.ApprovedCount, &stats.RejectedCount,
		&stats.TotalRequested, &stats.TotalApproved,
	)
	if err != nil {
		return nil, err
	}
	// By program
	programQuery := `
		SELECT funding_program, COUNT(*)
		FROM funding_applications
		WHERE tenant_id = $1 AND status != 'ARCHIVED'
		GROUP BY funding_program
	`
	rows, err := s.pool.Query(ctx, programQuery, tenantID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		var program FundingProgram
		var count int
		if err := rows.Scan(&program, &count); err != nil {
			return nil, err
		}
		stats.ByProgram[program] = count
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return stats, nil
}
// SaveExportBundle saves an export bundle record.
// The bundle is mutated in place: it receives a fresh UUID, the current time
// as GeneratedAt, and an ExpiresAt 24 hours in the future before the insert.
//
// Fix: the json.Marshal error for the document list was previously discarded;
// it now aborts the insert.
func (s *PostgresStore) SaveExportBundle(ctx context.Context, bundle *ExportBundle) error {
	bundle.ID = uuid.New()
	bundle.GeneratedAt = time.Now()
	bundle.ExpiresAt = time.Now().Add(24 * time.Hour) // 24h expiry
	documentsJSON, err := json.Marshal(bundle.Documents)
	if err != nil {
		return fmt.Errorf("marshaling export documents: %w", err)
	}
	query := `
		INSERT INTO funding_export_bundles (
			id, application_id, documents, generated_at, download_url, expires_at
		) VALUES ($1, $2, $3, $4, $5, $6)
	`
	_, err = s.pool.Exec(ctx, query,
		bundle.ID, bundle.ApplicationID, documentsJSON,
		bundle.GeneratedAt, bundle.DownloadURL, bundle.ExpiresAt,
	)
	return err
}
// GetExportBundle retrieves a non-expired export bundle by ID.
// Expired bundles are filtered out in SQL (expires_at > NOW()), so they
// surface as the driver's "no rows" error.
//
// Fix: the json.Unmarshal error for the documents payload was previously
// ignored; a corrupt payload now fails the call. An empty/NULL payload is
// tolerated and leaves Documents at its zero value.
func (s *PostgresStore) GetExportBundle(ctx context.Context, bundleID uuid.UUID) (*ExportBundle, error) {
	query := `
		SELECT id, application_id, documents, generated_at, download_url, expires_at
		FROM funding_export_bundles
		WHERE id = $1 AND expires_at > NOW()
	`
	var bundle ExportBundle
	var documentsJSON []byte
	err := s.pool.QueryRow(ctx, query, bundleID).Scan(
		&bundle.ID, &bundle.ApplicationID, &documentsJSON,
		&bundle.GeneratedAt, &bundle.DownloadURL, &bundle.ExpiresAt,
	)
	if err != nil {
		return nil, err
	}
	if len(documentsJSON) > 0 {
		if err := json.Unmarshal(documentsJSON, &bundle.Documents); err != nil {
			return nil, fmt.Errorf("decoding export documents: %w", err)
		}
	}
	return &bundle, nil
}
// generateApplicationNumber creates a unique application number of the form
// PREFIX-STATE-YEAR-NNNNN (e.g. "DP1-BY-2026-00042"), where the prefix is
// derived from the funding program and the state from the school profile
// ("XX" when no profile is set).
//
// Fix: the sequence-lookup Scan error was previously discarded, leaving seq
// at 0 and producing a "-00000" suffix that repeats on every failure; the
// sequence now falls back to 1 when the lookup fails.
//
// NOTE(review): this read-then-format scheme is racy under concurrent
// inserts — presumably uniqueness is enforced by a constraint on
// application_number; confirm. The query deliberately uses
// context.Background() since the caller's context is not threaded through.
func (s *PostgresStore) generateApplicationNumber(program FundingProgram, school *SchoolProfile) string {
	year := time.Now().Year()
	state := "XX" // placeholder when no school profile is available
	if school != nil {
		state = string(school.FederalState)
	}
	prefix := "FA"
	switch program {
	case FundingProgramDigitalPakt1:
		prefix = "DP1"
	case FundingProgramDigitalPakt2:
		prefix = "DP2"
	case FundingProgramLandesfoerderung:
		prefix = "LF"
	}
	// Next sequence number = max existing 5-digit suffix for this
	// prefix/state/year combination, plus one.
	seq := 1
	err := s.pool.QueryRow(context.Background(),
		`SELECT COALESCE(MAX(CAST(SUBSTRING(application_number FROM '\d{5}$') AS INTEGER)), 0) + 1
		FROM funding_applications WHERE application_number LIKE $1`,
		fmt.Sprintf("%s-%s-%d-%%", prefix, state, year),
	).Scan(&seq)
	if err != nil {
		seq = 1 // best-effort fallback; previously the error left seq at 0
	}
	return fmt.Sprintf("%s-%s-%d-%05d", prefix, state, year, seq)
}

View File

@@ -1,81 +0,0 @@
package funding
import (
"context"
"github.com/google/uuid"
)
// Store defines the interface for funding application persistence.
// Implementations back the full funding-application lifecycle: CRUD,
// list/search, wizard step data, file attachments, the audit trail,
// dashboard statistics, and export-bundle tracking.
type Store interface {
	// Application CRUD
	CreateApplication(ctx context.Context, app *FundingApplication) error
	GetApplication(ctx context.Context, id uuid.UUID) (*FundingApplication, error)
	// GetApplicationByNumber looks up an application by its human-readable
	// application number (e.g. "DP1-BY-2026-00042").
	GetApplicationByNumber(ctx context.Context, number string) (*FundingApplication, error)
	UpdateApplication(ctx context.Context, app *FundingApplication) error
	DeleteApplication(ctx context.Context, id uuid.UUID) error
	// List & Search — ListApplications applies the ApplicationFilter
	// (status/program/state/date plus paging and sorting).
	ListApplications(ctx context.Context, tenantID uuid.UUID, filter ApplicationFilter) (*ApplicationListResponse, error)
	SearchApplications(ctx context.Context, tenantID uuid.UUID, query string) ([]FundingApplication, error)
	// Wizard Data — per-step form payloads and derived completion progress.
	SaveWizardStep(ctx context.Context, appID uuid.UUID, step int, data map[string]interface{}) error
	GetWizardProgress(ctx context.Context, appID uuid.UUID) (*WizardProgress, error)
	// Attachments
	AddAttachment(ctx context.Context, appID uuid.UUID, attachment *Attachment) error
	GetAttachments(ctx context.Context, appID uuid.UUID) ([]Attachment, error)
	DeleteAttachment(ctx context.Context, attachmentID uuid.UUID) error
	// Application History (Audit Trail)
	AddHistoryEntry(ctx context.Context, entry *ApplicationHistoryEntry) error
	GetHistory(ctx context.Context, appID uuid.UUID) ([]ApplicationHistoryEntry, error)
	// Statistics — tenant-wide aggregates for the dashboard.
	GetStatistics(ctx context.Context, tenantID uuid.UUID) (*FundingStatistics, error)
	// Export Tracking — generated export bundles with a limited lifetime.
	SaveExportBundle(ctx context.Context, bundle *ExportBundle) error
	GetExportBundle(ctx context.Context, bundleID uuid.UUID) (*ExportBundle, error)
}
// ApplicationFilter for filtering list queries.
// Nil pointer fields mean "do not filter on this attribute".
type ApplicationFilter struct {
	Status         *ApplicationStatus `json:"status,omitempty"`
	FundingProgram *FundingProgram    `json:"funding_program,omitempty"`
	FederalState   *FederalState      `json:"federal_state,omitempty"`
	CreatedAfter   *string            `json:"created_after,omitempty"`  // lower creation-date bound; format presumably RFC3339 — confirm against the implementation
	CreatedBefore  *string            `json:"created_before,omitempty"` // upper creation-date bound; see CreatedAfter
	Page           int                `json:"page"`
	PageSize       int                `json:"page_size"`
	SortBy         string             `json:"sort_by,omitempty"`
	SortOrder      string             `json:"sort_order,omitempty"` // asc, desc
}
// ApplicationHistoryEntry for audit trail.
// One row per recorded action on an application; ID and PerformedAt are
// assigned by the store on insert (see Store.AddHistoryEntry).
type ApplicationHistoryEntry struct {
	ID            uuid.UUID              `json:"id"`
	ApplicationID uuid.UUID              `json:"application_id"`
	Action        string                 `json:"action"` // created, updated, submitted, approved, etc.
	ChangedFields []string               `json:"changed_fields,omitempty"`
	OldValues     map[string]interface{} `json:"old_values,omitempty"` // field values before the change
	NewValues     map[string]interface{} `json:"new_values,omitempty"` // field values after the change
	PerformedBy   uuid.UUID              `json:"performed_by"`
	PerformedAt   string                 `json:"performed_at"` // RFC3339 timestamp, set by the store on insert
	Notes         string                 `json:"notes,omitempty"`
}
// FundingStatistics for dashboard.
// Aggregated over all non-archived applications of one tenant.
type FundingStatistics struct {
	TotalApplications  int                    `json:"total_applications"`
	DraftCount         int                    `json:"draft_count"`
	SubmittedCount     int                    `json:"submitted_count"`
	ApprovedCount      int                    `json:"approved_count"`
	RejectedCount      int                    `json:"rejected_count"`
	TotalRequested     float64                `json:"total_requested"` // sum of requested amounts
	TotalApproved      float64                `json:"total_approved"`  // sum of approved amounts (missing values count as 0)
	AverageProcessDays float64                `json:"average_process_days"` // NOTE(review): not computed by PostgresStore.GetStatistics — confirm the producer
	ByProgram          map[FundingProgram]int `json:"by_program"` // application count per funding program
	ByState            map[FederalState]int   `json:"by_state"`   // NOTE(review): initialized but never populated by PostgresStore.GetStatistics
}

View File

@@ -1,371 +0,0 @@
package gci
import (
"fmt"
"math"
"time"
)
// Engine calculates the GCI score. It is stateless; all inputs are
// fetched per call.
type Engine struct{}

// NewEngine creates a new GCI calculation engine.
func NewEngine() *Engine {
	return new(Engine)
}
// Calculate computes the full GCI result for a tenant using the weighting
// profile identified by profileID.
//
// Pipeline (notable factors are recorded in the returned audit trail):
//  1. Load module completion data (currently mock data).
//  2. Level 1: per-module raw score (completed/assigned * 100), decayed by
//     certificate validity.
//  3. Level 2: risk-weighted average score per compliance area.
//  4. Level 3: area scores combined with the profile's area weights.
//  5. Weighted average of area scores -> raw GCI.
//  6. Criticality multiplier (penalty for low completion on high-risk modules).
//  7. Incident adjustment (penalty for open/critical incidents).
//  8. Clamp to [0, 100], rounded to one decimal.
//  9. Map the final score to a maturity level.
func (e *Engine) Calculate(tenantID string, profileID string) *GCIResult {
	now := time.Now()
	profile := GetProfile(profileID)
	auditTrail := []AuditEntry{}
	// Step 1: Get module data (mock for now)
	modules := MockModuleData(tenantID)
	certDates := MockCertificateData()
	// Step 2: Calculate Level 1 - Module Scores with validity
	for i := range modules {
		m := &modules[i]
		if m.Assigned > 0 {
			m.RawScore = float64(m.Completed) / float64(m.Assigned) * 100.0
		}
		// Apply validity factor
		if validUntil, ok := certDates[m.ModuleID]; ok {
			m.ValidityFactor = CalculateValidityFactor(validUntil, now)
		} else {
			m.ValidityFactor = 1.0 // No certificate tracking = assume valid
		}
		m.FinalScore = m.RawScore * m.ValidityFactor
		// Expiring/expired certificates reduce the score; record that in the trail.
		if m.ValidityFactor < 1.0 {
			auditTrail = append(auditTrail, AuditEntry{
				Timestamp: now,
				Factor: "validity_decay",
				Description: fmt.Sprintf("Modul '%s': Gueltigkeitsfaktor %.2f (Zertifikat laeuft ab/abgelaufen)", m.ModuleName, m.ValidityFactor),
				Value: m.ValidityFactor,
				Impact: "negative",
			})
		}
	}
	// Step 3: Calculate Level 2 - Risk-Weighted Scores per area
	areaModules := map[string][]ModuleScore{
		"dsgvo": {},
		"nis2": {},
		"iso27001": {},
		"ai_act": {},
	}
	// Modules whose category is not one of the four known areas are dropped here.
	for _, m := range modules {
		if _, ok := areaModules[m.Category]; ok {
			areaModules[m.Category] = append(areaModules[m.Category], m)
		}
	}
	level2Areas := []RiskWeightedScore{}
	areaNames := map[string]string{
		"dsgvo": "DSGVO",
		"nis2": "NIS2",
		"iso27001": "ISO 27001",
		"ai_act": "EU AI Act",
	}
	for areaID, mods := range areaModules {
		rws := RiskWeightedScore{
			AreaID: areaID,
			AreaName: areaNames[areaID],
			Modules: mods,
		}
		for _, m := range mods {
			rws.WeightedSum += m.FinalScore * m.RiskWeight
			rws.TotalWeight += m.RiskWeight
		}
		if rws.TotalWeight > 0 {
			rws.AreaScore = rws.WeightedSum / rws.TotalWeight
		}
		level2Areas = append(level2Areas, rws)
	}
	// Step 4: Calculate Level 3 - Regulation Area Scores
	areaScores := []RegulationAreaScore{}
	for _, rws := range level2Areas {
		weight := profile.Weights[rws.AreaID]
		// A module counts as completed only when all assigned users finished it.
		completedCount := 0
		for _, m := range rws.Modules {
			if m.Completed >= m.Assigned && m.Assigned > 0 {
				completedCount++
			}
		}
		ras := RegulationAreaScore{
			RegulationID: rws.AreaID,
			RegulationName: rws.AreaName,
			Score: math.Round(rws.AreaScore*100) / 100,
			Weight: weight,
			WeightedScore: rws.AreaScore * weight,
			ModuleCount: len(rws.Modules),
			CompletedCount: completedCount,
		}
		areaScores = append(areaScores, ras)
		auditTrail = append(auditTrail, AuditEntry{
			Timestamp: now,
			Factor: "area_score",
			Description: fmt.Sprintf("Bereich '%s': Score %.1f, Gewicht %.0f%%", rws.AreaName, rws.AreaScore, weight*100),
			Value: rws.AreaScore,
			Impact: "neutral",
		})
	}
	// Step 5: Calculate raw GCI (weighted average of area scores)
	rawGCI := 0.0
	totalWeight := 0.0
	for _, ras := range areaScores {
		rawGCI += ras.WeightedScore
		totalWeight += ras.Weight
	}
	if totalWeight > 0 {
		rawGCI = rawGCI / totalWeight
	}
	// Step 6: Apply Criticality Multiplier
	criticalityMult := calculateCriticalityMultiplier(modules)
	auditTrail = append(auditTrail, AuditEntry{
		Timestamp: now,
		Factor: "criticality_multiplier",
		Description: fmt.Sprintf("Kritikalitaetsmultiplikator: %.3f", criticalityMult),
		Value: criticalityMult,
		// The multiplier can only be <= 1.0, so impact is negative or neutral.
		Impact: func() string {
			if criticalityMult < 1.0 {
				return "negative"
			}
			return "neutral"
		}(),
	})
	// Step 7: Apply Incident Adjustment
	openInc, critInc := MockIncidentData()
	incidentAdj := calculateIncidentAdjustment(openInc, critInc)
	auditTrail = append(auditTrail, AuditEntry{
		Timestamp: now,
		Factor: "incident_adjustment",
		Description: fmt.Sprintf("Vorfallsanpassung: %.3f (%d offen, %d kritisch)", incidentAdj, openInc, critInc),
		Value: incidentAdj,
		Impact: "negative",
	})
	// Step 8: Final GCI, clamped to [0, 100] and rounded to one decimal
	finalGCI := rawGCI * criticalityMult * incidentAdj
	finalGCI = math.Max(0, math.Min(100, math.Round(finalGCI*10)/10))
	// Step 9: Determine Maturity Level
	maturity := determineMaturityLevel(finalGCI)
	auditTrail = append(auditTrail, AuditEntry{
		Timestamp: now,
		Factor: "final_gci",
		Description: fmt.Sprintf("GCI-Endergebnis: %.1f → Reifegrad: %s", finalGCI, MaturityLabels[maturity]),
		Value: finalGCI,
		Impact: "neutral",
	})
	return &GCIResult{
		TenantID: tenantID,
		GCIScore: finalGCI,
		MaturityLevel: maturity,
		MaturityLabel: MaturityLabels[maturity],
		CalculatedAt: now,
		Profile: profileID,
		AreaScores: areaScores,
		CriticalityMult: criticalityMult,
		IncidentAdj: incidentAdj,
		AuditTrail: auditTrail,
	}
}
// CalculateBreakdown returns the full 4-level breakdown: the overall GCI
// result plus the Level-1 module scores and Level-2 risk-weighted area
// scores that feed into it.
//
// NOTE(review): the Level-1/Level-2 math here duplicates Calculate (which is
// also re-run internally); keep both in sync or extract shared helpers.
// Unlike Calculate, the grouping below does not restrict module categories
// to the four known areas.
func (e *Engine) CalculateBreakdown(tenantID string, profileID string) *GCIBreakdown {
	result := e.Calculate(tenantID, profileID)
	modules := MockModuleData(tenantID)
	certDates := MockCertificateData()
	now := time.Now()
	// Recalculate module scores for the breakdown (same rules as Calculate step 2)
	for i := range modules {
		m := &modules[i]
		if m.Assigned > 0 {
			m.RawScore = float64(m.Completed) / float64(m.Assigned) * 100.0
		}
		if validUntil, ok := certDates[m.ModuleID]; ok {
			m.ValidityFactor = CalculateValidityFactor(validUntil, now)
		} else {
			m.ValidityFactor = 1.0
		}
		m.FinalScore = m.RawScore * m.ValidityFactor
	}
	// Build Level 2 areas (risk-weighted average per category)
	areaModules := map[string][]ModuleScore{}
	for _, m := range modules {
		areaModules[m.Category] = append(areaModules[m.Category], m)
	}
	areaNames := map[string]string{"dsgvo": "DSGVO", "nis2": "NIS2", "iso27001": "ISO 27001", "ai_act": "EU AI Act"}
	level2 := []RiskWeightedScore{}
	for areaID, mods := range areaModules {
		rws := RiskWeightedScore{AreaID: areaID, AreaName: areaNames[areaID], Modules: mods}
		for _, m := range mods {
			rws.WeightedSum += m.FinalScore * m.RiskWeight
			rws.TotalWeight += m.RiskWeight
		}
		if rws.TotalWeight > 0 {
			rws.AreaScore = rws.WeightedSum / rws.TotalWeight
		}
		level2 = append(level2, rws)
	}
	return &GCIBreakdown{
		GCIResult: *result,
		Level1Modules: modules,
		Level2Areas: level2,
	}
}
// GetHistory returns historical GCI snapshots for the tenant, with the
// freshly calculated current score (default profile) appended as the most
// recent entry.
func (e *Engine) GetHistory(tenantID string) []GCISnapshot {
	result := e.Calculate(tenantID, "default")
	scoresByArea := make(map[string]float64, len(result.AreaScores))
	for _, area := range result.AreaScores {
		scoresByArea[area.RegulationID] = area.Score
	}
	current := GCISnapshot{
		TenantID:      tenantID,
		Score:         result.GCIScore,
		MaturityLevel: result.MaturityLevel,
		AreaScores:    scoresByArea,
		CalculatedAt:  result.CalculatedAt,
	}
	return append(MockGCIHistory(tenantID), current)
}
// GetMatrix returns the compliance matrix (roles x regulations): for each of
// five fixed roles, the average completion percentage per regulation area
// over the modules required for that role, plus required/completed module
// counts and an overall score (average of the per-regulation averages).
func (e *Engine) GetMatrix(tenantID string) []ComplianceMatrixEntry {
	modules := MockModuleData(tenantID)
	roles := []struct {
		ID   string
		Name string
	}{
		{"management", "Geschaeftsfuehrung"},
		{"it_security", "IT-Sicherheit / CISO"},
		{"data_protection", "Datenschutz / DSB"},
		{"hr", "Personalwesen"},
		{"general", "Allgemeine Mitarbeiter"},
	}
	// Define which modules are relevant per role
	roleModules := map[string][]string{
		"management": {"dsgvo-grundlagen", "nis2-management", "ai-governance", "iso-isms"},
		"it_security": {"nis2-risikomanagement", "nis2-incident-response", "iso-zugangssteuerung", "iso-kryptografie", "ai-hochrisiko"},
		"data_protection": {"dsgvo-grundlagen", "dsgvo-betroffenenrechte", "dsgvo-tom", "dsgvo-dsfa", "dsgvo-auftragsverarbeitung"},
		"hr": {"dsgvo-grundlagen", "dsgvo-betroffenenrechte", "nis2-management"},
		"general": {"dsgvo-grundlagen", "nis2-risikomanagement", "ai-risikokategorien", "ai-transparenz"},
	}
	moduleMap := map[string]ModuleScore{}
	for _, m := range modules {
		moduleMap[m.ModuleID] = m
	}
	entries := []ComplianceMatrixEntry{}
	for _, role := range roles {
		entry := ComplianceMatrixEntry{
			Role: role.ID,
			RoleName: role.Name,
			Regulations: map[string]float64{},
		}
		// Collect per-regulation completion percentages for the role's modules.
		// Required modules missing from the data set are silently skipped.
		regScores := map[string][]float64{}
		requiredModuleIDs := roleModules[role.ID]
		entry.RequiredModules = len(requiredModuleIDs)
		for _, modID := range requiredModuleIDs {
			if m, ok := moduleMap[modID]; ok {
				score := 0.0
				if m.Assigned > 0 {
					score = float64(m.Completed) / float64(m.Assigned) * 100
				}
				regScores[m.Category] = append(regScores[m.Category], score)
				if m.Completed >= m.Assigned && m.Assigned > 0 {
					entry.CompletedModules++
				}
			}
		}
		// Average per regulation (rounded to one decimal), then the overall
		// score is the average of those per-regulation averages.
		totalScore := 0.0
		count := 0
		for reg, scores := range regScores {
			sum := 0.0
			for _, s := range scores {
				sum += s
			}
			avg := sum / float64(len(scores))
			entry.Regulations[reg] = math.Round(avg*10) / 10
			totalScore += avg
			count++
		}
		if count > 0 {
			entry.OverallScore = math.Round(totalScore/float64(count)*10) / 10
		}
		entries = append(entries, entry)
	}
	return entries
}
// Helper functions
// calculateCriticalityMultiplier penalizes the GCI when high-risk modules
// (risk weight >= 2.5) show low completion (final score < 50). The penalty
// scales with the share of such underperforming critical modules, capped at
// a 15% reduction; with no critical modules the multiplier is 1.0.
func calculateCriticalityMultiplier(modules []ModuleScore) float64 {
	var critical, underperforming int
	for _, mod := range modules {
		if mod.RiskWeight < 2.5 {
			continue
		}
		critical++
		if mod.FinalScore < 50 {
			underperforming++
		}
	}
	if critical == 0 {
		return 1.0
	}
	ratio := float64(underperforming) / float64(critical)
	return 1.0 - (ratio * 0.15) // max 15% reduction
}
// calculateIncidentAdjustment derives a multiplicative penalty from security
// incidents: each open incident costs 1% and each critical incident a
// further 3%, floored at 0.8 (i.e. at most a 20% total reduction).
func calculateIncidentAdjustment(openIncidents, criticalIncidents int) float64 {
	penalty := 1.0
	penalty -= float64(openIncidents) * 0.01
	penalty -= float64(criticalIncidents) * 0.03
	return math.Max(0.8, penalty)
}
// determineMaturityLevel maps a final GCI score to its maturity band:
// >=90 optimized, >=75 managed, >=60 defined, >=40 reactive, otherwise
// high-risk.
func determineMaturityLevel(score float64) string {
	bands := []struct {
		min   float64
		level string
	}{
		{90, MaturityOptimized},
		{75, MaturityManaged},
		{60, MaturityDefined},
		{40, MaturityReactive},
	}
	for _, band := range bands {
		if score >= band.min {
			return band.level
		}
	}
	return MaturityHighRisk
}

View File

@@ -1,188 +0,0 @@
package gci
import "math"
// ISOGapAnalysis represents the complete ISO 27001 gap analysis for a
// tenant, as produced by CalculateISOGapAnalysis.
type ISOGapAnalysis struct {
	TenantID          string               `json:"tenant_id"`
	TotalControls     int                  `json:"total_controls"`   // number of Annex A controls considered
	CoveredFull       int                  `json:"covered_full"`     // controls fully covered by mapped modules
	CoveredPartial    int                  `json:"covered_partial"`  // controls only partially covered
	NotCovered        int                  `json:"not_covered"`      // controls with no covering module
	CoveragePercent   float64              `json:"coverage_percent"` // (full + partial) / total * 100, one decimal
	CategorySummaries []ISOCategorySummary `json:"category_summaries"`
	ControlDetails    []ISOControlDetail   `json:"control_details"`
	Gaps              []ISOGap             `json:"gaps"`
}
// ISOControlDetail shows coverage status for a single control.
type ISOControlDetail struct {
	Control       ISOControl `json:"control"`
	CoverageLevel string     `json:"coverage_level"` // full, partial, none
	CoveredBy     []string   `json:"covered_by"`     // module IDs
	Score         float64    `json:"score"`          // 0-100; average covering-module completion, scaled by 0.7 for partial coverage
}
// ISOGap represents an identified gap in ISO coverage: a control that no
// training module maps to, together with a priority and a remediation hint.
type ISOGap struct {
	ControlID      string `json:"control_id"`
	ControlName    string `json:"control_name"`
	Category       string `json:"category"`
	Priority       string `json:"priority"` // high, medium, low
	Recommendation string `json:"recommendation"` // German-language remediation suggestion
}
// CalculateISOGapAnalysis performs the ISO 27001 gap analysis for a tenant.
//
// It inverts DefaultISOModuleMappings into a control -> covering-modules map,
// scores each Annex A control from the completion of the modules covering it
// (partial coverage scales the score by 0.7), aggregates counts globally and
// per category (A.5-A.8), and emits a prioritized gap entry for every
// uncovered control. Module data currently comes from MockModuleData.
//
// Fix: the global covered/not-covered counters and the gap generation were
// previously nested inside the per-category nil check, so a control with an
// unexpected CategoryID would silently vanish from the totals and gap list;
// they now run for every control, and only the per-category tally is skipped
// for unknown categories.
func CalculateISOGapAnalysis(tenantID string) *ISOGapAnalysis {
	modules := MockModuleData(tenantID)
	moduleMap := make(map[string]ModuleScore, len(modules))
	for _, m := range modules {
		moduleMap[m.ModuleID] = m
	}
	// Build reverse mapping: control -> modules covering it
	controlCoverage := map[string][]string{}
	controlCoverageLevel := map[string]string{}
	for _, mapping := range DefaultISOModuleMappings {
		for _, controlID := range mapping.ISOControls {
			controlCoverage[controlID] = append(controlCoverage[controlID], mapping.ModuleID)
			// Keep the highest coverage level seen so far ("full" beats "partial").
			existingLevel := controlCoverageLevel[controlID]
			if mapping.CoverageLevel == "full" || existingLevel == "" {
				controlCoverageLevel[controlID] = mapping.CoverageLevel
			}
		}
	}
	// Analyze each control
	details := []ISOControlDetail{}
	gaps := []ISOGap{}
	coveredFull := 0
	coveredPartial := 0
	notCovered := 0
	categoryCounts := map[string]*ISOCategorySummary{
		"A.5": {CategoryID: "A.5", CategoryName: "Organisatorische Massnahmen"},
		"A.6": {CategoryID: "A.6", CategoryName: "Personelle Massnahmen"},
		"A.7": {CategoryID: "A.7", CategoryName: "Physische Massnahmen"},
		"A.8": {CategoryID: "A.8", CategoryName: "Technologische Massnahmen"},
	}
	for _, control := range ISOControls {
		coveredBy := controlCoverage[control.ID]
		level := controlCoverageLevel[control.ID]
		if len(coveredBy) == 0 {
			level = "none"
		}
		// Score = average completion of covering modules, damped for partial coverage.
		score := 0.0
		if len(coveredBy) > 0 {
			scoreSum := 0.0
			count := 0
			for _, modID := range coveredBy {
				if m, ok := moduleMap[modID]; ok && m.Assigned > 0 {
					scoreSum += float64(m.Completed) / float64(m.Assigned) * 100
					count++
				}
			}
			if count > 0 {
				score = scoreSum / float64(count)
			}
			if level == "partial" {
				score *= 0.7 // partial coverage reduces effective score
			}
		}
		details = append(details, ISOControlDetail{
			Control:       control,
			CoverageLevel: level,
			CoveredBy:     coveredBy,
			Score:         math.Round(score*10) / 10,
		})
		// Aggregate counts. cat is nil for an unknown category, in which
		// case only the per-category tally is skipped.
		cat := categoryCounts[control.CategoryID]
		if cat != nil {
			cat.TotalControls++
		}
		switch level {
		case "full":
			coveredFull++
			if cat != nil {
				cat.CoveredFull++
			}
		case "partial":
			coveredPartial++
			if cat != nil {
				cat.CoveredPartial++
			}
		default:
			notCovered++
			if cat != nil {
				cat.NotCovered++
			}
			// Every uncovered control yields a gap recommendation.
			gaps = append(gaps, ISOGap{
				ControlID:      control.ID,
				ControlName:    control.Name,
				Category:       control.Category,
				Priority:       determineGapPriority(control),
				Recommendation: generateGapRecommendation(control),
			})
		}
	}
	totalControls := len(ISOControls)
	coveragePercent := 0.0
	if totalControls > 0 {
		coveragePercent = math.Round(float64(coveredFull+coveredPartial)/float64(totalControls)*100*10) / 10
	}
	summaries := []ISOCategorySummary{}
	for _, catID := range []string{"A.5", "A.6", "A.7", "A.8"} {
		if cat, ok := categoryCounts[catID]; ok {
			summaries = append(summaries, *cat)
		}
	}
	return &ISOGapAnalysis{
		TenantID:          tenantID,
		TotalControls:     totalControls,
		CoveredFull:       coveredFull,
		CoveredPartial:    coveredPartial,
		NotCovered:        notCovered,
		CoveragePercent:   coveragePercent,
		CategorySummaries: summaries,
		ControlDetails:    details,
		Gaps:              gaps,
	}
}
// determineGapPriority ranks an uncovered control: a fixed set of access,
// incident, and data-protection controls is "high"; the remaining
// organizational (A.5) and people (A.6) controls are "medium"; everything
// else is "low".
func determineGapPriority(control ISOControl) string {
	switch control.ID {
	case "A.5.15", "A.5.17", "A.5.24", "A.5.26",
		"A.5.34", "A.8.2", "A.8.5", "A.8.7",
		"A.8.10", "A.8.20":
		return "high"
	}
	switch control.CategoryID {
	case "A.5", "A.6":
		return "medium"
	}
	return "low"
}
// generateGapRecommendation builds a German remediation hint for an
// uncovered control, phrased according to its category; unknown categories
// get a generic fallback.
func generateGapRecommendation(control ISOControl) string {
	var prefix string
	switch control.Category {
	case "organizational":
		prefix = "Erstellen Sie eine Richtlinie und weisen Sie Verantwortlichkeiten zu fuer: "
	case "people":
		prefix = "Implementieren Sie Schulungen und Prozesse fuer: "
	case "physical":
		prefix = "Definieren Sie physische Sicherheitsmassnahmen fuer: "
	case "technological":
		prefix = "Implementieren Sie technische Kontrollen fuer: "
	default:
		prefix = "Massnahmen implementieren fuer: "
	}
	return prefix + control.Name
}

View File

@@ -1,207 +0,0 @@
package gci
// ISOControl represents an ISO 27001:2022 Annex A control as used by the
// gap analysis (see the ISOControls catalog below).
type ISOControl struct {
	ID          string `json:"id"` // e.g. "A.5.1"
	Name        string `json:"name"`
	Category    string `json:"category"` // organizational, people, physical, technological
	CategoryID  string `json:"category_id"` // A.5, A.6, A.7, A.8
	Description string `json:"description"`
}
// ISOModuleMapping maps a course/module to the ISO controls it addresses,
// with a single qualitative coverage level applying to all listed controls.
type ISOModuleMapping struct {
	ModuleID      string   `json:"module_id"`
	ModuleName    string   `json:"module_name"`
	ISOControls   []string `json:"iso_controls"` // control IDs
	CoverageLevel string   `json:"coverage_level"` // full, partial, none
}
// ISO 27001:2022 Annex A controls (representative selection)
var ISOControls = []ISOControl{
// A.5 Organizational Controls (37 controls, showing key ones)
{ID: "A.5.1", Name: "Informationssicherheitsrichtlinien", Category: "organizational", CategoryID: "A.5", Description: "Informationssicherheitsleitlinie und themenspezifische Richtlinien"},
{ID: "A.5.2", Name: "Rollen und Verantwortlichkeiten", Category: "organizational", CategoryID: "A.5", Description: "Definition und Zuweisung von Informationssicherheitsrollen"},
{ID: "A.5.3", Name: "Aufgabentrennung", Category: "organizational", CategoryID: "A.5", Description: "Trennung von konfligierenden Aufgaben und Verantwortlichkeiten"},
{ID: "A.5.4", Name: "Managementverantwortung", Category: "organizational", CategoryID: "A.5", Description: "Fuehrungskraefte muessen Sicherheitsrichtlinien einhalten und durchsetzen"},
{ID: "A.5.5", Name: "Kontakt mit Behoerden", Category: "organizational", CategoryID: "A.5", Description: "Pflege von Kontakten zu relevanten Aufsichtsbehoerden"},
{ID: "A.5.6", Name: "Kontakt mit Interessengruppen", Category: "organizational", CategoryID: "A.5", Description: "Kontakt zu Fachgruppen und Sicherheitsforen"},
{ID: "A.5.7", Name: "Bedrohungsintelligenz", Category: "organizational", CategoryID: "A.5", Description: "Sammlung und Analyse von Bedrohungsinformationen"},
{ID: "A.5.8", Name: "Informationssicherheit im Projektmanagement", Category: "organizational", CategoryID: "A.5", Description: "Integration von Sicherheit in Projektmanagement"},
{ID: "A.5.9", Name: "Inventar der Informationswerte", Category: "organizational", CategoryID: "A.5", Description: "Inventarisierung und Verwaltung von Informationswerten"},
{ID: "A.5.10", Name: "Zuleassige Nutzung", Category: "organizational", CategoryID: "A.5", Description: "Regeln fuer die zuleassige Nutzung von Informationswerten"},
{ID: "A.5.11", Name: "Rueckgabe von Werten", Category: "organizational", CategoryID: "A.5", Description: "Rueckgabe von Werten bei Beendigung"},
{ID: "A.5.12", Name: "Klassifizierung von Informationen", Category: "organizational", CategoryID: "A.5", Description: "Klassifizierungsschema fuer Informationen"},
{ID: "A.5.13", Name: "Kennzeichnung von Informationen", Category: "organizational", CategoryID: "A.5", Description: "Kennzeichnung gemaess Klassifizierung"},
{ID: "A.5.14", Name: "Informationsuebertragung", Category: "organizational", CategoryID: "A.5", Description: "Regeln fuer sichere Informationsuebertragung"},
{ID: "A.5.15", Name: "Zugangssteuerung", Category: "organizational", CategoryID: "A.5", Description: "Zugangssteuerungsrichtlinie"},
{ID: "A.5.16", Name: "Identitaetsmanagement", Category: "organizational", CategoryID: "A.5", Description: "Verwaltung des Lebenszyklus von Identitaeten"},
{ID: "A.5.17", Name: "Authentifizierungsinformationen", Category: "organizational", CategoryID: "A.5", Description: "Verwaltung von Authentifizierungsinformationen"},
{ID: "A.5.18", Name: "Zugriffsrechte", Category: "organizational", CategoryID: "A.5", Description: "Vergabe, Pruefung und Entzug von Zugriffsrechten"},
{ID: "A.5.19", Name: "Informationssicherheit in Lieferantenbeziehungen", Category: "organizational", CategoryID: "A.5", Description: "Sicherheitsanforderungen an Lieferanten"},
{ID: "A.5.20", Name: "Informationssicherheit in Lieferantenvereinbarungen", Category: "organizational", CategoryID: "A.5", Description: "Sicherheitsklauseln in Vertraegen"},
{ID: "A.5.21", Name: "IKT-Lieferkette", Category: "organizational", CategoryID: "A.5", Description: "Management der IKT-Lieferkette"},
{ID: "A.5.22", Name: "Ueberwachung von Lieferantenservices", Category: "organizational", CategoryID: "A.5", Description: "Ueberwachung und Pruefung von Lieferantenservices"},
{ID: "A.5.23", Name: "Cloud-Sicherheit", Category: "organizational", CategoryID: "A.5", Description: "Informationssicherheit fuer Cloud-Dienste"},
{ID: "A.5.24", Name: "Vorfallsmanagement - Planung", Category: "organizational", CategoryID: "A.5", Description: "Planung und Vorbereitung des Vorfallsmanagements"},
{ID: "A.5.25", Name: "Vorfallsbeurteilung", Category: "organizational", CategoryID: "A.5", Description: "Beurteilung und Entscheidung ueber Sicherheitsereignisse"},
{ID: "A.5.26", Name: "Vorfallsreaktion", Category: "organizational", CategoryID: "A.5", Description: "Reaktion auf Sicherheitsvorfaelle"},
{ID: "A.5.27", Name: "Aus Vorfaellen lernen", Category: "organizational", CategoryID: "A.5", Description: "Lessons Learned aus Sicherheitsvorfaellen"},
{ID: "A.5.28", Name: "Beweissicherung", Category: "organizational", CategoryID: "A.5", Description: "Identifikation und Sicherung von Beweisen"},
{ID: "A.5.29", Name: "Informationssicherheit bei Stoerungen", Category: "organizational", CategoryID: "A.5", Description: "Sicherheit waehrend Stoerungen und Krisen"},
{ID: "A.5.30", Name: "IKT-Bereitschaft fuer Business Continuity", Category: "organizational", CategoryID: "A.5", Description: "IKT-Bereitschaft zur Unterstuetzung der Geschaeftskontinuitaet"},
{ID: "A.5.31", Name: "Rechtliche Anforderungen", Category: "organizational", CategoryID: "A.5", Description: "Einhaltung rechtlicher und vertraglicher Anforderungen"},
{ID: "A.5.32", Name: "Geistige Eigentumsrechte", Category: "organizational", CategoryID: "A.5", Description: "Schutz geistigen Eigentums"},
{ID: "A.5.33", Name: "Schutz von Aufzeichnungen", Category: "organizational", CategoryID: "A.5", Description: "Schutz von Aufzeichnungen vor Verlust und Manipulation"},
{ID: "A.5.34", Name: "Datenschutz und PII", Category: "organizational", CategoryID: "A.5", Description: "Datenschutz und Schutz personenbezogener Daten"},
{ID: "A.5.35", Name: "Unabhaengige Ueberpruefung", Category: "organizational", CategoryID: "A.5", Description: "Unabhaengige Ueberpruefung der Informationssicherheit"},
{ID: "A.5.36", Name: "Richtlinienkonformitaet", Category: "organizational", CategoryID: "A.5", Description: "Einhaltung von Richtlinien und Standards"},
{ID: "A.5.37", Name: "Dokumentierte Betriebsverfahren", Category: "organizational", CategoryID: "A.5", Description: "Dokumentation von Betriebsverfahren"},
// A.6 People Controls (8 controls)
{ID: "A.6.1", Name: "Ueberpruefen", Category: "people", CategoryID: "A.6", Description: "Hintergrundpruefungen vor der Einstellung"},
{ID: "A.6.2", Name: "Beschaeftigungsbedingungen", Category: "people", CategoryID: "A.6", Description: "Sicherheitsanforderungen in Arbeitsvertraegen"},
{ID: "A.6.3", Name: "Sensibilisierung und Schulung", Category: "people", CategoryID: "A.6", Description: "Awareness-Programme und Schulungen"},
{ID: "A.6.4", Name: "Disziplinarverfahren", Category: "people", CategoryID: "A.6", Description: "Formales Disziplinarverfahren"},
{ID: "A.6.5", Name: "Verantwortlichkeiten nach Beendigung", Category: "people", CategoryID: "A.6", Description: "Sicherheitspflichten nach Beendigung des Beschaeftigungsverhaeltnisses"},
{ID: "A.6.6", Name: "Vertraulichkeitsvereinbarungen", Category: "people", CategoryID: "A.6", Description: "Vertraulichkeits- und Geheimhaltungsvereinbarungen"},
{ID: "A.6.7", Name: "Remote-Arbeit", Category: "people", CategoryID: "A.6", Description: "Sicherheitsmassnahmen fuer Remote-Arbeit"},
{ID: "A.6.8", Name: "Meldung von Sicherheitsereignissen", Category: "people", CategoryID: "A.6", Description: "Mechanismen zur Meldung von Sicherheitsereignissen"},
// A.7 Physical Controls (14 controls, showing key ones)
{ID: "A.7.1", Name: "Physische Sicherheitsperimeter", Category: "physical", CategoryID: "A.7", Description: "Definition physischer Sicherheitszonen"},
{ID: "A.7.2", Name: "Physischer Zutritt", Category: "physical", CategoryID: "A.7", Description: "Zutrittskontrolle zu Sicherheitszonen"},
{ID: "A.7.3", Name: "Sicherung von Bueros und Raeumen", Category: "physical", CategoryID: "A.7", Description: "Physische Sicherheit fuer Bueros und Raeume"},
{ID: "A.7.4", Name: "Physische Sicherheitsueberwachung", Category: "physical", CategoryID: "A.7", Description: "Ueberwachung physischer Sicherheit"},
{ID: "A.7.5", Name: "Schutz vor Umweltgefahren", Category: "physical", CategoryID: "A.7", Description: "Schutz gegen natuerliche und menschgemachte Gefahren"},
{ID: "A.7.6", Name: "Arbeit in Sicherheitszonen", Category: "physical", CategoryID: "A.7", Description: "Regeln fuer das Arbeiten in Sicherheitszonen"},
{ID: "A.7.7", Name: "Aufgeraemter Schreibtisch", Category: "physical", CategoryID: "A.7", Description: "Clean-Desk und Clear-Screen Richtlinie"},
{ID: "A.7.8", Name: "Geraeteplatzierung", Category: "physical", CategoryID: "A.7", Description: "Platzierung und Schutz von Geraeten"},
{ID: "A.7.9", Name: "Sicherheit von Geraeten ausserhalb", Category: "physical", CategoryID: "A.7", Description: "Sicherheit von Geraeten ausserhalb der Raeumlichkeiten"},
{ID: "A.7.10", Name: "Speichermedien", Category: "physical", CategoryID: "A.7", Description: "Verwaltung von Speichermedien"},
{ID: "A.7.11", Name: "Versorgungseinrichtungen", Category: "physical", CategoryID: "A.7", Description: "Schutz vor Ausfaellen der Versorgungseinrichtungen"},
{ID: "A.7.12", Name: "Verkabelungssicherheit", Category: "physical", CategoryID: "A.7", Description: "Schutz der Verkabelung"},
{ID: "A.7.13", Name: "Instandhaltung von Geraeten", Category: "physical", CategoryID: "A.7", Description: "Korrekte Instandhaltung von Geraeten"},
{ID: "A.7.14", Name: "Sichere Entsorgung", Category: "physical", CategoryID: "A.7", Description: "Sichere Entsorgung oder Wiederverwendung"},
// A.8 Technological Controls (34 controls, showing key ones)
{ID: "A.8.1", Name: "Endbenutzergeraete", Category: "technological", CategoryID: "A.8", Description: "Sicherheit von Endbenutzergeraeten"},
{ID: "A.8.2", Name: "Privilegierte Zugriffsrechte", Category: "technological", CategoryID: "A.8", Description: "Verwaltung privilegierter Zugriffsrechte"},
{ID: "A.8.3", Name: "Informationszugangsbeschraenkung", Category: "technological", CategoryID: "A.8", Description: "Beschraenkung des Zugangs zu Informationen"},
{ID: "A.8.4", Name: "Zugang zu Quellcode", Category: "technological", CategoryID: "A.8", Description: "Sicherer Zugang zu Quellcode"},
{ID: "A.8.5", Name: "Sichere Authentifizierung", Category: "technological", CategoryID: "A.8", Description: "Sichere Authentifizierungstechnologien"},
{ID: "A.8.6", Name: "Kapazitaetsmanagement", Category: "technological", CategoryID: "A.8", Description: "Ueberwachung und Anpassung der Kapazitaet"},
{ID: "A.8.7", Name: "Schutz gegen Malware", Category: "technological", CategoryID: "A.8", Description: "Schutz vor Schadprogrammen"},
{ID: "A.8.8", Name: "Management technischer Schwachstellen", Category: "technological", CategoryID: "A.8", Description: "Identifikation und Behebung von Schwachstellen"},
{ID: "A.8.9", Name: "Konfigurationsmanagement", Category: "technological", CategoryID: "A.8", Description: "Sichere Konfiguration von Systemen"},
{ID: "A.8.10", Name: "Datensicherung", Category: "technological", CategoryID: "A.8", Description: "Erstellen und Testen von Datensicherungen"},
{ID: "A.8.11", Name: "Datenredundanz", Category: "technological", CategoryID: "A.8", Description: "Redundanz von Informationsverarbeitungseinrichtungen"},
{ID: "A.8.12", Name: "Protokollierung", Category: "technological", CategoryID: "A.8", Description: "Aufzeichnung und Ueberwachung von Aktivitaeten"},
{ID: "A.8.13", Name: "Ueberwachung von Aktivitaeten", Category: "technological", CategoryID: "A.8", Description: "Ueberwachung von Netzwerken und Systemen"},
{ID: "A.8.14", Name: "Zeitsynchronisation", Category: "technological", CategoryID: "A.8", Description: "Synchronisation von Uhren"},
{ID: "A.8.15", Name: "Nutzung privilegierter Hilfsprogramme", Category: "technological", CategoryID: "A.8", Description: "Einschraenkung privilegierter Hilfsprogramme"},
{ID: "A.8.16", Name: "Softwareinstallation", Category: "technological", CategoryID: "A.8", Description: "Kontrolle der Softwareinstallation"},
{ID: "A.8.17", Name: "Netzwerksicherheit", Category: "technological", CategoryID: "A.8", Description: "Sicherheit von Netzwerken"},
{ID: "A.8.18", Name: "Netzwerksegmentierung", Category: "technological", CategoryID: "A.8", Description: "Segmentierung von Netzwerken"},
{ID: "A.8.19", Name: "Webfilterung", Category: "technological", CategoryID: "A.8", Description: "Filterung des Webzugangs"},
{ID: "A.8.20", Name: "Kryptografie", Category: "technological", CategoryID: "A.8", Description: "Einsatz kryptografischer Massnahmen"},
{ID: "A.8.21", Name: "Sichere Entwicklung", Category: "technological", CategoryID: "A.8", Description: "Sichere Entwicklungslebenszyklus"},
{ID: "A.8.22", Name: "Sicherheitsanforderungen bei Applikationen", Category: "technological", CategoryID: "A.8", Description: "Sicherheitsanforderungen bei Anwendungen"},
{ID: "A.8.23", Name: "Sichere Systemarchitektur", Category: "technological", CategoryID: "A.8", Description: "Sicherheitsprinzipien in der Systemarchitektur"},
{ID: "A.8.24", Name: "Sicheres Programmieren", Category: "technological", CategoryID: "A.8", Description: "Sichere Programmierpraktiken"},
{ID: "A.8.25", Name: "Sicherheitstests", Category: "technological", CategoryID: "A.8", Description: "Sicherheitstests in der Entwicklung und Abnahme"},
{ID: "A.8.26", Name: "Auslagerung der Entwicklung", Category: "technological", CategoryID: "A.8", Description: "Ueberwachung ausgelagerter Entwicklung"},
{ID: "A.8.27", Name: "Trennung von Umgebungen", Category: "technological", CategoryID: "A.8", Description: "Trennung von Entwicklungs-, Test- und Produktionsumgebungen"},
{ID: "A.8.28", Name: "Aenderungsmanagement", Category: "technological", CategoryID: "A.8", Description: "Formales Aenderungsmanagement"},
{ID: "A.8.29", Name: "Sicherheitstests in der Abnahme", Category: "technological", CategoryID: "A.8", Description: "Durchfuehrung von Sicherheitstests vor Abnahme"},
{ID: "A.8.30", Name: "Datenloeschung", Category: "technological", CategoryID: "A.8", Description: "Sichere Datenloeschung"},
{ID: "A.8.31", Name: "Datenmaskierung", Category: "technological", CategoryID: "A.8", Description: "Techniken zur Datenmaskierung"},
{ID: "A.8.32", Name: "Verhinderung von Datenverlust", Category: "technological", CategoryID: "A.8", Description: "DLP-Massnahmen"},
{ID: "A.8.33", Name: "Testinformationen", Category: "technological", CategoryID: "A.8", Description: "Schutz von Testinformationen"},
{ID: "A.8.34", Name: "Audit-Informationssysteme", Category: "technological", CategoryID: "A.8", Description: "Schutz von Audit-Tools und -systemen"},
}
// Default mappings: which modules cover which ISO controls.
//
// DefaultISOModuleMappings links academy training modules to the ISO 27001
// Annex A controls they address. CoverageLevel is either "full" or "partial"
// and is consumed when aggregating control coverage (see ISOCategorySummary).
// Control IDs must exist in the ISOControls table above.
var DefaultISOModuleMappings = []ISOModuleMapping{
	{
		ModuleID: "iso-isms", ModuleName: "ISMS Grundlagen",
		ISOControls:   []string{"A.5.1", "A.5.2", "A.5.3", "A.5.4", "A.5.35", "A.5.36"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "iso-risikobewertung", ModuleName: "Risikobewertung",
		ISOControls:   []string{"A.5.7", "A.5.8", "A.5.9", "A.5.10", "A.5.12", "A.5.13"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "iso-zugangssteuerung", ModuleName: "Zugangssteuerung",
		ISOControls:   []string{"A.5.15", "A.5.16", "A.5.17", "A.5.18", "A.8.2", "A.8.3", "A.8.5"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "iso-kryptografie", ModuleName: "Kryptografie",
		ISOControls:   []string{"A.8.20", "A.8.21", "A.8.24"},
		CoverageLevel: "partial",
	},
	{
		ModuleID: "iso-physisch", ModuleName: "Physische Sicherheit",
		ISOControls:   []string{"A.7.1", "A.7.2", "A.7.3", "A.7.4", "A.7.5", "A.7.7", "A.7.8"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "dsgvo-tom", ModuleName: "Technisch-Organisatorische Massnahmen",
		ISOControls:   []string{"A.5.34", "A.8.10", "A.8.12", "A.8.30", "A.8.31"},
		CoverageLevel: "partial",
	},
	{
		ModuleID: "nis2-incident-response", ModuleName: "NIS2 Incident Response",
		ISOControls:   []string{"A.5.24", "A.5.25", "A.5.26", "A.5.27", "A.5.28", "A.6.8"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "nis2-supply-chain", ModuleName: "NIS2 Lieferkettensicherheit",
		ISOControls:   []string{"A.5.19", "A.5.20", "A.5.21", "A.5.22", "A.5.23"},
		CoverageLevel: "full",
	},
	{
		ModuleID: "nis2-risikomanagement", ModuleName: "NIS2 Risikomanagement",
		ISOControls:   []string{"A.5.29", "A.5.30", "A.8.6", "A.8.7", "A.8.8", "A.8.9"},
		CoverageLevel: "partial",
	},
	{
		ModuleID: "dsgvo-grundlagen", ModuleName: "DSGVO Grundlagen",
		ISOControls:   []string{"A.5.31", "A.5.34", "A.6.2", "A.6.3"},
		CoverageLevel: "partial",
	},
}
// GetISOControlByID looks up an ISO 27001 Annex A control by its identifier
// (e.g. "A.5.1"). The boolean result reports whether the control exists;
// on a miss the zero-value ISOControl is returned.
func GetISOControlByID(id string) (ISOControl, bool) {
	for i := range ISOControls {
		if ISOControls[i].ID == id {
			return ISOControls[i], true
		}
	}
	var none ISOControl
	return none, false
}
// GetISOControlsByCategory returns all controls belonging to the given
// Annex A category (e.g. "A.5"). The result is nil when no control matches,
// preserving the original's behavior for JSON encoding.
func GetISOControlsByCategory(categoryID string) []ISOControl {
	var matches []ISOControl
	for i := range ISOControls {
		if ISOControls[i].CategoryID != categoryID {
			continue
		}
		matches = append(matches, ISOControls[i])
	}
	return matches
}
// ISOCategorySummary provides a summary per ISO category.
//
// It counts the controls of one Annex A category (e.g. "A.5") by coverage
// status. Presumably TotalControls equals CoveredFull + CoveredPartial +
// NotCovered — confirm in the aggregation code that fills this struct.
type ISOCategorySummary struct {
	CategoryID     string `json:"category_id"`
	CategoryName   string `json:"category_name"`
	TotalControls  int    `json:"total_controls"`
	CoveredFull    int    `json:"covered_full"`
	CoveredPartial int    `json:"covered_partial"`
	NotCovered     int    `json:"not_covered"`
}

View File

@@ -1,74 +0,0 @@
package gci
import "time"
// MockModuleData provides fallback data when academy store is empty.
//
// It returns a static catalog of training modules with fixed
// assigned/completed counts, grouped by regulation (DSGVO, NIS2, ISO 27001,
// AI Act). tenantID is currently ignored — every tenant receives the same
// catalog. RiskWeight encodes module criticality for weighted scoring.
func MockModuleData(tenantID string) []ModuleScore {
	return []ModuleScore{
		// DSGVO modules
		{ModuleID: "dsgvo-grundlagen", ModuleName: "DSGVO Grundlagen", Assigned: 25, Completed: 22, Category: "dsgvo", RiskWeight: 2.0},
		{ModuleID: "dsgvo-betroffenenrechte", ModuleName: "Betroffenenrechte", Assigned: 25, Completed: 18, Category: "dsgvo", RiskWeight: 2.5},
		{ModuleID: "dsgvo-tom", ModuleName: "Technisch-Organisatorische Massnahmen", Assigned: 20, Completed: 17, Category: "dsgvo", RiskWeight: 2.5},
		{ModuleID: "dsgvo-dsfa", ModuleName: "Datenschutz-Folgenabschaetzung", Assigned: 15, Completed: 10, Category: "dsgvo", RiskWeight: 2.0},
		{ModuleID: "dsgvo-auftragsverarbeitung", ModuleName: "Auftragsverarbeitung", Assigned: 20, Completed: 16, Category: "dsgvo", RiskWeight: 2.0},
		// NIS2 modules
		{ModuleID: "nis2-risikomanagement", ModuleName: "NIS2 Risikomanagement", Assigned: 15, Completed: 11, Category: "nis2", RiskWeight: 3.0},
		{ModuleID: "nis2-incident-response", ModuleName: "NIS2 Incident Response", Assigned: 15, Completed: 9, Category: "nis2", RiskWeight: 3.0},
		{ModuleID: "nis2-supply-chain", ModuleName: "NIS2 Lieferkettensicherheit", Assigned: 10, Completed: 6, Category: "nis2", RiskWeight: 2.0},
		{ModuleID: "nis2-management", ModuleName: "NIS2 Geschaeftsleitungspflicht", Assigned: 10, Completed: 8, Category: "nis2", RiskWeight: 3.0},
		// ISO 27001 modules
		{ModuleID: "iso-isms", ModuleName: "ISMS Grundlagen", Assigned: 20, Completed: 16, Category: "iso27001", RiskWeight: 2.0},
		{ModuleID: "iso-risikobewertung", ModuleName: "Risikobewertung", Assigned: 15, Completed: 12, Category: "iso27001", RiskWeight: 2.0},
		{ModuleID: "iso-zugangssteuerung", ModuleName: "Zugangssteuerung", Assigned: 20, Completed: 18, Category: "iso27001", RiskWeight: 2.0},
		{ModuleID: "iso-kryptografie", ModuleName: "Kryptografie", Assigned: 10, Completed: 7, Category: "iso27001", RiskWeight: 1.5},
		{ModuleID: "iso-physisch", ModuleName: "Physische Sicherheit", Assigned: 10, Completed: 9, Category: "iso27001", RiskWeight: 1.0},
		// AI Act modules
		{ModuleID: "ai-risikokategorien", ModuleName: "KI-Risikokategorien", Assigned: 15, Completed: 12, Category: "ai_act", RiskWeight: 2.5},
		{ModuleID: "ai-transparenz", ModuleName: "KI-Transparenzpflichten", Assigned: 15, Completed: 10, Category: "ai_act", RiskWeight: 2.0},
		{ModuleID: "ai-hochrisiko", ModuleName: "Hochrisiko-KI-Systeme", Assigned: 10, Completed: 6, Category: "ai_act", RiskWeight: 2.5},
		{ModuleID: "ai-governance", ModuleName: "KI-Governance", Assigned: 10, Completed: 7, Category: "ai_act", RiskWeight: 2.0},
	}
}
// MockCertificateData provides mock certificate validity dates, keyed by
// module ID. Expiry dates are expressed as month offsets from the current
// time; negative offsets yield certificates that are already expired, and
// small positive offsets yield certificates that expire soon.
func MockCertificateData() map[string]time.Time {
	now := time.Now()
	// Months until expiry per module; negative = already expired.
	monthOffsets := map[string]int{
		"dsgvo-grundlagen":           8,
		"dsgvo-betroffenenrechte":    3,
		"dsgvo-tom":                  10,
		"dsgvo-dsfa":                 -1,
		"dsgvo-auftragsverarbeitung": 6,
		"nis2-risikomanagement":      5,
		"nis2-incident-response":     2,
		"nis2-supply-chain":          -2,
		"nis2-management":            9,
		"iso-isms":                   12,
		"iso-risikobewertung":        4,
		"iso-zugangssteuerung":       11,
		"iso-kryptografie":           1,
		"iso-physisch":               7,
		"ai-risikokategorien":        6,
		"ai-transparenz":             3,
		"ai-hochrisiko":              -3,
		"ai-governance":              5,
	}
	validity := make(map[string]time.Time, len(monthOffsets))
	for moduleID, months := range monthOffsets {
		validity[moduleID] = now.AddDate(0, months, 0)
	}
	return validity
}
// MockIncidentData returns fixed incident counts used for the score
// adjustment: 3 open incidents, of which 1 is critical.
func MockIncidentData() (openIncidents int, criticalIncidents int) {
	openIncidents = 3
	criticalIncidents = 1
	return openIncidents, criticalIncidents
}
// MockGCIHistory returns mock historical GCI snapshots.
//
// It produces three monthly snapshots (3, 2 and 1 months old) with a
// steadily rising score, so trend views have plausible demo data. The
// snapshots are ordered oldest first.
func MockGCIHistory(tenantID string) []GCISnapshot {
	now := time.Now()
	return []GCISnapshot{
		{TenantID: tenantID, Score: 58.2, MaturityLevel: MaturityReactive, AreaScores: map[string]float64{"dsgvo": 62, "nis2": 48, "iso27001": 60, "ai_act": 55}, CalculatedAt: now.AddDate(0, -3, 0)},
		{TenantID: tenantID, Score: 62.5, MaturityLevel: MaturityDefined, AreaScores: map[string]float64{"dsgvo": 65, "nis2": 55, "iso27001": 63, "ai_act": 58}, CalculatedAt: now.AddDate(0, -2, 0)},
		{TenantID: tenantID, Score: 67.8, MaturityLevel: MaturityDefined, AreaScores: map[string]float64{"dsgvo": 70, "nis2": 60, "iso27001": 68, "ai_act": 62}, CalculatedAt: now.AddDate(0, -1, 0)},
	}
}

View File

@@ -1,104 +0,0 @@
package gci
import "time"
// Level 1: Module Score
//
// ModuleScore captures completion of a single training module. The mock
// data only fills Assigned/Completed, RiskWeight and Category; RawScore,
// ValidityFactor and FinalScore appear to be derived by the scoring
// pipeline — verify against the calculation code.
type ModuleScore struct {
	ModuleID       string  `json:"module_id"`
	ModuleName     string  `json:"module_name"`
	Assigned       int     `json:"assigned"`
	Completed      int     `json:"completed"`
	RawScore       float64 `json:"raw_score"`       // completions/assigned
	ValidityFactor float64 `json:"validity_factor"` // 0.0-1.0
	FinalScore     float64 `json:"final_score"`     // RawScore * ValidityFactor
	RiskWeight     float64 `json:"risk_weight"`     // module criticality weight
	Category       string  `json:"category"`        // dsgvo, nis2, iso27001, ai_act
}
// Level 2: Risk-weighted Module Score per regulation area
//
// RiskWeightedScore aggregates the modules of one regulation area using
// their risk weights; AreaScore is the weighted average.
type RiskWeightedScore struct {
	AreaID      string        `json:"area_id"`
	AreaName    string        `json:"area_name"`
	Modules     []ModuleScore `json:"modules"`
	WeightedSum float64       `json:"weighted_sum"`
	TotalWeight float64       `json:"total_weight"`
	AreaScore   float64       `json:"area_score"` // WeightedSum / TotalWeight
}
// Level 3: Regulation Area Score
//
// RegulationAreaScore is the per-regulation contribution to the overall
// GCI: the area's 0-100 score multiplied by its profile weight.
type RegulationAreaScore struct {
	RegulationID   string  `json:"regulation_id"`   // dsgvo, nis2, iso27001, ai_act
	RegulationName string  `json:"regulation_name"` // Display name
	Score          float64 `json:"score"`           // 0-100
	Weight         float64 `json:"weight"`          // regulation weight in GCI
	WeightedScore  float64 `json:"weighted_score"`  // Score * Weight
	ModuleCount    int     `json:"module_count"`
	CompletedCount int     `json:"completed_count"`
}
// Level 4: GCI Result
//
// GCIResult is the top-level compliance index for a tenant, including the
// per-regulation breakdown and an audit trail explaining each adjustment
// factor that influenced the final score.
type GCIResult struct {
	TenantID        string                `json:"tenant_id"`
	GCIScore        float64               `json:"gci_score"`      // 0-100
	MaturityLevel   string                `json:"maturity_level"` // Optimized, Managed, Defined, Reactive, HighRisk
	MaturityLabel   string                `json:"maturity_label"` // German label
	CalculatedAt    time.Time             `json:"calculated_at"`
	Profile         string                `json:"profile"` // default, nis2_relevant, ki_nutzer
	AreaScores      []RegulationAreaScore `json:"area_scores"`
	CriticalityMult float64               `json:"criticality_multiplier"`
	IncidentAdj     float64               `json:"incident_adjustment"`
	AuditTrail      []AuditEntry          `json:"audit_trail"`
}
// GCI Breakdown with all 4 levels
//
// GCIBreakdown embeds the final GCIResult and additionally exposes the
// intermediate level-1 (per module) and level-2 (per area) scores.
type GCIBreakdown struct {
	GCIResult
	Level1Modules []ModuleScore       `json:"level1_modules"`
	Level2Areas   []RiskWeightedScore `json:"level2_areas"`
}
// MaturityLevel constants
//
// Ordered from best (OPTIMIZED) to worst (HIGH_RISK); the German display
// strings live in MaturityLabels.
const (
	MaturityOptimized = "OPTIMIZED"
	MaturityManaged   = "MANAGED"
	MaturityDefined   = "DEFINED"
	MaturityReactive  = "REACTIVE"
	MaturityHighRisk  = "HIGH_RISK"
)
// Maturity level labels (German)
//
// MaturityLabels maps each MaturityLevel constant to its German UI label.
var MaturityLabels = map[string]string{
	MaturityOptimized: "Optimiert",
	MaturityManaged:   "Gesteuert",
	MaturityDefined:   "Definiert",
	MaturityReactive:  "Reaktiv",
	MaturityHighRisk:  "Hohes Risiko",
}
// AuditEntry for score transparency
//
// One entry per factor that influenced the GCI calculation, so the final
// score can be explained to auditors.
type AuditEntry struct {
	Timestamp   time.Time `json:"timestamp"`
	Factor      string    `json:"factor"`
	Description string    `json:"description"`
	Value       float64   `json:"value"`
	Impact      string    `json:"impact"` // positive, negative, neutral
}
// ComplianceMatrixEntry maps roles to regulations
//
// One row of the role-vs-regulation compliance matrix: per-regulation
// scores keyed by regulation ID plus aggregate module completion counts.
type ComplianceMatrixEntry struct {
	Role             string             `json:"role"`
	RoleName         string             `json:"role_name"`
	Regulations      map[string]float64 `json:"regulations"` // regulation_id -> score
	OverallScore     float64            `json:"overall_score"`
	RequiredModules  int                `json:"required_modules"`
	CompletedModules int                `json:"completed_modules"`
}
// GCI History snapshot
//
// GCISnapshot is a point-in-time record of a tenant's GCI, used for
// historical trend views (see MockGCIHistory). AreaScores is keyed by
// regulation ID (dsgvo, nis2, iso27001, ai_act).
type GCISnapshot struct {
	TenantID      string             `json:"tenant_id"`
	Score         float64            `json:"score"`
	MaturityLevel string             `json:"maturity_level"`
	AreaScores    map[string]float64 `json:"area_scores"`
	CalculatedAt  time.Time          `json:"calculated_at"`
}

View File

@@ -1,118 +0,0 @@
package gci
// NIS2Role defines a NIS2 role classification
//
// A role groups staff by NIS2 exposure and lists the training modules that
// are mandatory for that group. Priority 1 is the most critical tier.
type NIS2Role struct {
	ID               string   `json:"id"`
	Name             string   `json:"name"`
	Description      string   `json:"description"`
	MandatoryModules []string `json:"mandatory_modules"`
	Priority         int      `json:"priority"` // 1=highest
}
// NIS2RoleAssignment represents a user's NIS2 role
//
// Links one user of a tenant to a NIS2 role. AssignedAt is a date string
// (the mock data uses "YYYY-MM-DD") rather than a time.Time.
type NIS2RoleAssignment struct {
	TenantID   string `json:"tenant_id"`
	UserID     string `json:"user_id"`
	UserName   string `json:"user_name"`
	RoleID     string `json:"role_id"`
	RoleName   string `json:"role_name"`
	AssignedAt string `json:"assigned_at"`
}
// NIS2 role definitions
//
// Keys N1-N5 classify staff by NIS2 exposure. N2 and N5 share priority 2;
// ListNIS2Roles defines the display order. MandatoryModules reference
// module IDs from the academy catalog (see MockModuleData).
var NIS2Roles = map[string]NIS2Role{
	"N1": {
		ID:          "N1",
		Name:        "Geschaeftsleitung",
		Description: "Leitungsorgane mit persoenlicher Haftung gemaess NIS2 Art. 20",
		Priority:    1,
		MandatoryModules: []string{
			"nis2-management",
			"nis2-risikomanagement",
			"dsgvo-grundlagen",
			"iso-isms",
		},
	},
	"N2": {
		ID:          "N2",
		Name:        "IT-Sicherheit / CISO",
		Description: "Verantwortliche fuer IT-Sicherheit und Cybersecurity",
		Priority:    2,
		MandatoryModules: []string{
			"nis2-risikomanagement",
			"nis2-incident-response",
			"nis2-supply-chain",
			"iso-zugangssteuerung",
			"iso-kryptografie",
		},
	},
	"N3": {
		ID:          "N3",
		Name:        "Kritische Funktionen",
		Description: "Mitarbeiter in kritischen Geschaeftsprozessen",
		Priority:    3,
		MandatoryModules: []string{
			"nis2-risikomanagement",
			"nis2-incident-response",
			"dsgvo-tom",
			"iso-zugangssteuerung",
		},
	},
	"N4": {
		ID:          "N4",
		Name:        "Allgemeine Mitarbeiter",
		Description: "Alle Mitarbeiter mit IT-Zugang",
		Priority:    4,
		MandatoryModules: []string{
			"nis2-risikomanagement",
			"dsgvo-grundlagen",
			"iso-isms",
		},
	},
	"N5": {
		ID:          "N5",
		Name:        "Incident Response Team",
		Description: "Mitglieder des IRT/CSIRT gemaess NIS2 Art. 21",
		Priority:    2,
		MandatoryModules: []string{
			"nis2-incident-response",
			"nis2-risikomanagement",
			"nis2-supply-chain",
			"iso-zugangssteuerung",
			"iso-kryptografie",
			"iso-isms",
		},
	},
}
// GetNIS2Role returns the NIS2 role with the given ID; the boolean result
// reports whether the role is defined.
func GetNIS2Role(roleID string) (NIS2Role, bool) {
	role, found := NIS2Roles[roleID]
	return role, found
}
// ListNIS2Roles returns all NIS2 roles ordered by descending criticality:
// N1 first, then the two priority-2 roles (N2, N5), then N3 and N4.
// Unknown IDs in the order table are skipped, never panicking.
func ListNIS2Roles() []NIS2Role {
	displayOrder := [...]string{"N1", "N2", "N5", "N3", "N4"}
	roles := make([]NIS2Role, 0, len(displayOrder))
	for _, id := range displayOrder {
		role, ok := NIS2Roles[id]
		if !ok {
			continue
		}
		roles = append(roles, role)
	}
	return roles
}
// MockNIS2RoleAssignments returns mock role assignments.
//
// Seven demo users spread across the five NIS2 roles; RoleName mirrors the
// Name of the referenced role in NIS2Roles. AssignedAt is a "YYYY-MM-DD"
// date string.
func MockNIS2RoleAssignments(tenantID string) []NIS2RoleAssignment {
	return []NIS2RoleAssignment{
		{TenantID: tenantID, UserID: "user-001", UserName: "Dr. Schmidt", RoleID: "N1", RoleName: "Geschaeftsleitung", AssignedAt: "2025-06-01"},
		{TenantID: tenantID, UserID: "user-002", UserName: "M. Weber", RoleID: "N2", RoleName: "IT-Sicherheit / CISO", AssignedAt: "2025-06-01"},
		{TenantID: tenantID, UserID: "user-003", UserName: "S. Mueller", RoleID: "N5", RoleName: "Incident Response Team", AssignedAt: "2025-07-15"},
		{TenantID: tenantID, UserID: "user-004", UserName: "K. Fischer", RoleID: "N3", RoleName: "Kritische Funktionen", AssignedAt: "2025-08-01"},
		{TenantID: tenantID, UserID: "user-005", UserName: "L. Braun", RoleID: "N3", RoleName: "Kritische Funktionen", AssignedAt: "2025-08-01"},
		{TenantID: tenantID, UserID: "user-006", UserName: "A. Schwarz", RoleID: "N4", RoleName: "Allgemeine Mitarbeiter", AssignedAt: "2025-09-01"},
		{TenantID: tenantID, UserID: "user-007", UserName: "T. Wagner", RoleID: "N4", RoleName: "Allgemeine Mitarbeiter", AssignedAt: "2025-09-01"},
	}
}

View File

@@ -1,147 +0,0 @@
package gci
import (
	"math"
	"sort"
)
// NIS2Score represents the NIS2-specific compliance score
//
// Top-level result of CalculateNIS2Score: the weighted overall score with
// its maturity classification, per-area breakdown and per-role completion.
type NIS2Score struct {
	TenantID       string          `json:"tenant_id"`
	OverallScore   float64         `json:"overall_score"`
	MaturityLevel  string          `json:"maturity_level"`
	MaturityLabel  string          `json:"maturity_label"`
	AreaScores     []NIS2AreaScore `json:"area_scores"`
	RoleCompliance []NIS2RoleScore `json:"role_compliance"`
}
// NIS2AreaScore represents a NIS2 compliance area
//
// Score is the average module completion rate (0-100) of the area's
// modules; Weight is the area's share of the overall NIS2 score.
type NIS2AreaScore struct {
	AreaID    string   `json:"area_id"`
	AreaName  string   `json:"area_name"`
	Score     float64  `json:"score"`
	Weight    float64  `json:"weight"`
	ModuleIDs []string `json:"module_ids"`
}
// NIS2RoleScore represents completion per NIS2 role
//
// MandatoryDone counts mandatory modules whose completion rate reaches 80%;
// CompletionRate is the average rate across all mandatory modules, in
// percent rounded to one decimal.
type NIS2RoleScore struct {
	RoleID         string  `json:"role_id"`
	RoleName       string  `json:"role_name"`
	AssignedUsers  int     `json:"assigned_users"`
	CompletionRate float64 `json:"completion_rate"`
	MandatoryTotal int     `json:"mandatory_total"`
	MandatoryDone  int     `json:"mandatory_done"`
}
// NIS2 scoring areas with weights
// NIS2Score = 25% Management + 25% Incident + 30% IT Security + 20% Supply Chain
//
// The four weights sum to 1.0. ModuleIDs reference the academy catalog
// (see MockModuleData); a module may contribute to multiple areas.
var nis2Areas = []struct {
	ID        string
	Name      string
	Weight    float64
	ModuleIDs []string
}{
	{
		ID: "management", Name: "Management & Governance", Weight: 0.25,
		ModuleIDs: []string{"nis2-management", "dsgvo-grundlagen", "iso-isms"},
	},
	{
		ID: "incident", Name: "Vorfallsbehandlung", Weight: 0.25,
		ModuleIDs: []string{"nis2-incident-response"},
	},
	{
		ID: "it_security", Name: "IT-Sicherheit", Weight: 0.30,
		ModuleIDs: []string{"nis2-risikomanagement", "iso-zugangssteuerung", "iso-kryptografie"},
	},
	{
		ID: "supply_chain", Name: "Lieferkettensicherheit", Weight: 0.20,
		ModuleIDs: []string{"nis2-supply-chain", "dsgvo-auftragsverarbeitung"},
	},
}
// CalculateNIS2Score computes the NIS2-specific compliance score for a
// tenant.
//
// The overall score is the weighted mean of the four areas in nis2Areas
// (25% management, 25% incident handling, 30% IT security, 20% supply
// chain), where each area score is the average completion rate of its
// modules in percent. Module data currently comes from MockModuleData.
func CalculateNIS2Score(tenantID string) *NIS2Score {
	modules := MockModuleData(tenantID)
	moduleMap := make(map[string]ModuleScore, len(modules))
	for _, m := range modules {
		moduleMap[m.ModuleID] = m
	}

	areaScores := make([]NIS2AreaScore, 0, len(nis2Areas))
	totalWeighted := 0.0
	for _, area := range nis2Areas {
		areaScore := NIS2AreaScore{
			AreaID:    area.ID,
			AreaName:  area.Name,
			Weight:    area.Weight,
			ModuleIDs: area.ModuleIDs,
		}
		scoreSum := 0.0
		count := 0
		for _, modID := range area.ModuleIDs {
			if m, ok := moduleMap[modID]; ok {
				if m.Assigned > 0 {
					scoreSum += float64(m.Completed) / float64(m.Assigned) * 100
				}
				// Known modules count toward the average even with zero
				// assignments, pulling the area score down.
				count++
			}
		}
		if count > 0 {
			// Round to one decimal place.
			areaScore.Score = math.Round(scoreSum/float64(count)*10) / 10
		}
		totalWeighted += areaScore.Score * areaScore.Weight
		areaScores = append(areaScores, areaScore)
	}
	overallScore := math.Round(totalWeighted*10) / 10

	// Resolve the maturity level once instead of recomputing it per field.
	maturity := determineMaturityLevel(overallScore)

	// Calculate role compliance.
	roleAssignments := MockNIS2RoleAssignments(tenantID)
	roleScores := calculateNIS2RoleScores(roleAssignments, moduleMap)

	return &NIS2Score{
		TenantID:       tenantID,
		OverallScore:   overallScore,
		MaturityLevel:  maturity,
		MaturityLabel:  MaturityLabels[maturity],
		AreaScores:     areaScores,
		RoleCompliance: roleScores,
	}
}
// calculateNIS2RoleScores aggregates mandatory-module completion per NIS2
// role.
//
// A module counts as "done" for a role once its completion rate reaches
// 80%. Roles are processed in sorted role-ID order so the returned slice
// is deterministic — ranging over the NIS2Roles map directly would yield a
// different order on every call.
func calculateNIS2RoleScores(assignments []NIS2RoleAssignment, moduleMap map[string]ModuleScore) []NIS2RoleScore {
	// Count assigned users per role.
	roleCounts := map[string]int{}
	for _, a := range assignments {
		roleCounts[a.RoleID]++
	}

	// Deterministic iteration order over the role map.
	roleIDs := make([]string, 0, len(NIS2Roles))
	for id := range NIS2Roles {
		roleIDs = append(roleIDs, id)
	}
	sort.Strings(roleIDs)

	scores := make([]NIS2RoleScore, 0, len(roleIDs))
	for _, roleID := range roleIDs {
		role := NIS2Roles[roleID]
		rs := NIS2RoleScore{
			RoleID:         roleID,
			RoleName:       role.Name,
			AssignedUsers:  roleCounts[roleID],
			MandatoryTotal: len(role.MandatoryModules),
		}
		completionSum := 0.0
		for _, modID := range role.MandatoryModules {
			m, ok := moduleMap[modID]
			if !ok || m.Assigned == 0 {
				continue
			}
			rate := float64(m.Completed) / float64(m.Assigned)
			completionSum += rate
			if rate >= 0.8 { // 80%+ = considered done
				rs.MandatoryDone++
			}
		}
		if rs.MandatoryTotal > 0 {
			// Average completion in percent, one decimal place.
			rs.CompletionRate = math.Round(completionSum/float64(rs.MandatoryTotal)*100*10) / 10
		}
		scores = append(scores, rs)
	}
	return scores
}

View File

@@ -1,59 +0,0 @@
package gci
import (
"math"
"time"
)
const (
	// GracePeriodDays is the number of days after expiry during which
	// the certificate still contributes (with declining factor).
	GracePeriodDays = 180
	// DecayStartDays is how many days before expiry the linear decay
	// begins. Both windows are six months long.
	DecayStartDays = 180
)
// CalculateValidityFactor computes the validity factor for a certificate
// based on its expiry date.
//
// Rules:
//   - More than DecayStartDays before expiry: factor = 1.0
//   - Within the pre-expiry window: linear decay from 1.0 down to 0.5
//   - After expiry, inside the grace period: linear decay from 0.5 to 0.0
//   - Expired beyond the grace period: factor = 0.0
func CalculateValidityFactor(validUntil time.Time, now time.Time) float64 {
	daysUntilExpiry := validUntil.Sub(now).Hours() / 24.0

	switch {
	case daysUntilExpiry > float64(DecayStartDays):
		// Still outside the decay window: full credit.
		return 1.0
	case daysUntilExpiry > 0:
		// Pre-expiry decay window: linear from 1.0 to 0.5.
		fraction := daysUntilExpiry / float64(DecayStartDays)
		return 0.5 + 0.5*fraction
	}

	// The certificate has expired.
	daysExpired := -daysUntilExpiry
	if daysExpired > float64(GracePeriodDays) {
		return 0.0
	}
	// Grace period: linear from 0.5 to 0.0.
	fraction := 1.0 - (daysExpired / float64(GracePeriodDays))
	return math.Max(0, 0.5*fraction)
}
// IsExpired returns true if the certificate is past its validity date
func IsExpired(validUntil time.Time, now time.Time) bool {
return now.After(validUntil)
}
// IsExpiringSoon reports whether the certificate is still valid but will
// expire within the decay window (DecayStartDays).
func IsExpiringSoon(validUntil time.Time, now time.Time) bool {
	remaining := validUntil.Sub(now).Hours() / 24.0
	if remaining <= 0 {
		return false
	}
	return remaining <= float64(DecayStartDays)
}

View File

@@ -1,78 +0,0 @@
package gci
// WeightProfile defines regulation weights for different compliance profiles
//
// Weights is keyed by regulation ID (dsgvo, nis2, iso27001, ai_act); the
// profiles defined in DefaultProfiles each sum to 1.0.
type WeightProfile struct {
	ID          string             `json:"id"`
	Name        string             `json:"name"`
	Description string             `json:"description"`
	Weights     map[string]float64 `json:"weights"` // regulation_id -> weight (0.0-1.0)
}
// Default weight profiles
//
// Each profile's four regulation weights sum to 1.0. "default" is the
// fallback used by GetProfile for unknown profile IDs.
var DefaultProfiles = map[string]WeightProfile{
	"default": {
		ID:          "default",
		Name:        "Standard",
		Description: "Ausgewogenes Profil fuer allgemeine Compliance",
		Weights: map[string]float64{
			"dsgvo":    0.30,
			"nis2":     0.25,
			"iso27001": 0.25,
			"ai_act":   0.20,
		},
	},
	"nis2_relevant": {
		ID:          "nis2_relevant",
		Name:        "NIS2-relevant",
		Description: "Fuer Betreiber kritischer Infrastrukturen",
		Weights: map[string]float64{
			"dsgvo":    0.25,
			"nis2":     0.35,
			"iso27001": 0.25,
			"ai_act":   0.15,
		},
	},
	"ki_nutzer": {
		ID:          "ki_nutzer",
		Name:        "KI-Nutzer",
		Description: "Fuer Organisationen mit KI-Einsatz",
		Weights: map[string]float64{
			"dsgvo":    0.25,
			"nis2":     0.25,
			"iso27001": 0.20,
			"ai_act":   0.30,
		},
	},
}
// ModuleRiskWeights defines risk criticality per module type
//
// Values range from 1.0 (low criticality) to 3.0 (high criticality);
// GetModuleRiskWeight falls back to 1.0 for unknown categories.
var ModuleRiskWeights = map[string]float64{
	"incident_response":    3.0,
	"management_awareness": 3.0,
	"data_protection":      2.5,
	"it_security":          2.5,
	"supply_chain":         2.0,
	"risk_assessment":      2.0,
	"access_control":       2.0,
	"business_continuity":  2.0,
	"employee_training":    1.5,
	"documentation":        1.5,
	"physical_security":    1.0,
	"general":              1.0,
}
// GetProfile returns the weight profile for profileID, falling back to the
// built-in "default" profile when the ID is unknown.
func GetProfile(profileID string) WeightProfile {
	profile, ok := DefaultProfiles[profileID]
	if !ok {
		return DefaultProfiles["default"]
	}
	return profile
}
// GetModuleRiskWeight returns the risk weight for a module category,
// defaulting to 1.0 for categories not listed in ModuleRiskWeights.
func GetModuleRiskWeight(category string) float64 {
	weight, ok := ModuleRiskWeights[category]
	if !ok {
		return 1.0
	}
	return weight
}

View File

@@ -1,65 +0,0 @@
package industry
// ============================================================================
// Industry-Specific Compliance Templates (Phase 3.3)
// Static reference data — no database migration needed.
// ============================================================================
// IndustryTemplate represents a complete compliance package for a specific industry
//
// Static reference data (see allTemplates): a slug-addressable bundle of
// applicable regulations, VVT templates, TOM recommendations and risk
// scenarios for one industry.
type IndustryTemplate struct {
	Slug               string              `json:"slug"` // URL-safe identifier, e.g. "it-software"
	Name               string              `json:"name"`
	Description        string              `json:"description"`
	Icon               string              `json:"icon"` // icon glyph (emoji)
	Regulations        []string            `json:"regulations"`
	VVTTemplates       []VVTTemplate       `json:"vvt_templates"`
	TOMRecommendations []TOMRecommendation `json:"tom_recommendations"`
	RiskScenarios      []RiskScenario      `json:"risk_scenarios"`
}
// VVTTemplate represents a pre-configured processing activity record template
//
// Pre-filled fields for a GDPR record of processing activities
// (Verzeichnis von Verarbeitungstaetigkeiten): purpose, legal basis,
// affected data categories and subjects, and the retention period.
type VVTTemplate struct {
	Name            string   `json:"name"`
	Purpose         string   `json:"purpose"`
	LegalBasis      string   `json:"legal_basis"`
	DataCategories  []string `json:"data_categories"`
	DataSubjects    []string `json:"data_subjects"`
	RetentionPeriod string   `json:"retention_period"`
}
// TOMRecommendation represents a recommended technical/organizational measure
//
// One suggested TOM for the industry, with a free-text priority field.
type TOMRecommendation struct {
	Category    string `json:"category"`
	Name        string `json:"name"`
	Description string `json:"description"`
	Priority    string `json:"priority"`
}
// RiskScenario represents an industry-specific data protection risk scenario
//
// Likelihood and Impact are free-text ratings; Mitigation describes the
// recommended countermeasure.
type RiskScenario struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	Likelihood  string `json:"likelihood"`
	Impact      string `json:"impact"`
	Mitigation  string `json:"mitigation"`
}
// ============================================================================
// API Response Types
// ============================================================================
// IndustryListResponse is the API response for listing all industries
//
// Total carries the number of entries in Industries.
type IndustryListResponse struct {
	Industries []IndustrySummary `json:"industries"`
	Total      int               `json:"total"`
}
// IndustrySummary is a condensed view of an industry template for list endpoints
//
// RegulationCount and TemplateCount summarize the full IndustryTemplate so
// list responses stay small.
type IndustrySummary struct {
	Slug            string `json:"slug"`
	Name            string `json:"name"`
	Description     string `json:"description"`
	Icon            string `json:"icon"`
	RegulationCount int    `json:"regulation_count"`
	TemplateCount   int    `json:"template_count"`
}

View File

@@ -1,558 +0,0 @@
package industry
// ============================================================================
// Static Industry Template Data
// ============================================================================
// allTemplates holds all pre-configured industry compliance packages.
// This is static reference data embedded in the binary — no database required.
// The order here is the order returned by GetAllTemplates.
var allTemplates = []IndustryTemplate{
	itSoftwareTemplate(),
	healthcareTemplate(),
	financeTemplate(),
	manufacturingTemplate(),
}
// GetAllTemplates returns all available industry templates.
//
// A fresh slice is returned so callers can append to or reorder the result
// without mutating the package-level registry. Note this is a shallow
// copy: the IndustryTemplate values still share their inner slices with
// the registry, so callers must not mutate template contents.
func GetAllTemplates() []IndustryTemplate {
	out := make([]IndustryTemplate, len(allTemplates))
	copy(out, allTemplates)
	return out
}
// GetTemplateBySlug returns the industry template matching the given slug,
// or nil if no match is found. The returned pointer aliases the
// package-level registry entry.
func GetTemplateBySlug(slug string) *IndustryTemplate {
	for i := 0; i < len(allTemplates); i++ {
		if allTemplates[i].Slug != slug {
			continue
		}
		return &allTemplates[i]
	}
	return nil
}
// ============================================================================
// IT & Software
// ============================================================================
// itSoftwareTemplate builds the static compliance package for IT companies,
// SaaS providers and software developers (DSGVO, AI Act, NIS2, ePrivacy).
// German umlauts in the data are ASCII-transliterated (ue/ae/oe/ss) and the
// section sign is written as the \u00a7 escape, matching the other templates.
func itSoftwareTemplate() IndustryTemplate {
	return IndustryTemplate{
		Slug:        "it-software",
		Name:        "IT & Software",
		Description: "Compliance-Paket fuer IT-Unternehmen, SaaS-Anbieter und Softwareentwickler mit Fokus auf AI Act, DSGVO fuer Cloud-Dienste und NIS2.",
		Icon:        "\U0001F4BB",
		Regulations: []string{"DSGVO", "AI Act", "NIS2", "ePrivacy"},
		VVTTemplates: []VVTTemplate{
			{
				Name:            "SaaS-Kundendaten",
				Purpose:         "Verarbeitung personenbezogener Daten von SaaS-Kunden zur Bereitstellung der vertraglichen Dienstleistung, einschliesslich Account-Verwaltung, Nutzungsanalyse und Abrechnung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung)",
				DataCategories:  []string{"Name", "E-Mail-Adresse", "Unternehmenszugehoerigkeit", "Nutzungsdaten", "Rechnungsdaten", "IP-Adresse"},
				DataSubjects:    []string{"Kunden", "Endnutzer der SaaS-Plattform"},
				RetentionPeriod: "Vertragsdauer + 10 Jahre (handelsrechtliche Aufbewahrungspflicht)",
			},
			{
				Name:            "Cloud-Hosting",
				Purpose:         "Speicherung und Verarbeitung von Kundendaten in Cloud-Infrastruktur (IaaS/PaaS) zur Gewaehrleistung der Verfuegbarkeit und Skalierbarkeit der Dienste.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), Art. 28 DSGVO (Auftragsverarbeitung)",
				DataCategories:  []string{"Alle vom Kunden eingestellten Daten", "Metadaten", "Logdateien", "Zugangsdaten"},
				DataSubjects:    []string{"Kunden", "Endnutzer", "Mitarbeiter der Kunden"},
				RetentionPeriod: "Vertragsdauer + 30 Tage Backup-Retention",
			},
			{
				Name:            "KI-Modelltraining",
				Purpose:         "Verwendung von (pseudonymisierten) Daten zum Training, zur Validierung und Verbesserung von KI-/ML-Modellen unter Einhaltung des AI Act.",
				LegalBasis:      "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), ggf. Art. 6 Abs. 1 lit. a (Einwilligung)",
				DataCategories:  []string{"Pseudonymisierte Nutzungsdaten", "Textdaten", "Interaktionsmuster", "Feedback-Daten"},
				DataSubjects:    []string{"Nutzer der KI-Funktionen", "Trainingsdaten-Quellen"},
				RetentionPeriod: "Bis Modell-Abloesung, max. 5 Jahre; Trainingsdaten nach Pseudonymisierung unbegrenzt",
			},
			{
				Name:            "Software-Analytics",
				Purpose:         "Erhebung anonymisierter und pseudonymisierter Nutzungsstatistiken zur Produktverbesserung, Fehleranalyse und Performance-Monitoring.",
				LegalBasis:      "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse)",
				DataCategories:  []string{"Geraetemerkmale", "Browserinformationen", "Nutzungsverhalten", "Crash-Reports", "Performance-Metriken"},
				DataSubjects:    []string{"Endnutzer der Software"},
				RetentionPeriod: "Rohdaten 90 Tage, aggregierte Daten 2 Jahre",
			},
			{
				Name:            "Newsletter/Marketing",
				Purpose:         "Versand von Produkt-Newslettern, Release-Benachrichtigungen und Marketing-Kommunikation an registrierte Nutzer und Interessenten.",
				LegalBasis:      "Art. 6 Abs. 1 lit. a DSGVO (Einwilligung)",
				DataCategories:  []string{"E-Mail-Adresse", "Name", "Unternehmen", "Oeffnungs- und Klickraten", "Abonnement-Praeferenzen"},
				DataSubjects:    []string{"Newsletter-Abonnenten", "Leads", "Bestandskunden"},
				RetentionPeriod: "Bis Widerruf der Einwilligung + 30 Tage Abwicklung",
			},
			{
				Name:            "Bewerbermanagement",
				Purpose:         "Verarbeitung von Bewerberdaten im Rahmen des Recruiting-Prozesses einschliesslich Sichtung, Kommunikation und Entscheidungsfindung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (vorvertragliche Massnahmen), \u00a726 BDSG",
				DataCategories:  []string{"Lebenslauf", "Anschreiben", "Zeugnisse", "Kontaktdaten", "Gehaltsvorstellungen", "Bewertungsnotizen"},
				DataSubjects:    []string{"Bewerber", "Empfehlungsgeber"},
				RetentionPeriod: "6 Monate nach Abschluss des Verfahrens (AGG-Frist), bei Einwilligung laenger",
			},
		},
		TOMRecommendations: []TOMRecommendation{
			{
				Category:    "encryption",
				Name:        "Verschluesselung at rest und in transit",
				Description: "Alle gespeicherten Daten mit AES-256 verschluesseln. Saemtlichen Netzwerkverkehr ueber TLS 1.3 absichern. Zertifikats-Management automatisieren.",
				Priority:    "critical",
			},
			{
				Category:    "access_control",
				Name:        "Multi-Faktor-Authentifizierung (MFA)",
				Description: "MFA fuer alle administrativen Zugaenge, Produktionssysteme und CI/CD-Pipelines erzwingen. FIDO2/WebAuthn bevorzugen.",
				Priority:    "critical",
			},
			{
				Category:    "monitoring",
				Name:        "Penetration Testing",
				Description: "Regelmaessige externe Penetrationstests (mind. jaehrlich) und kontinuierliche Schwachstellenscans der oeffentlich erreichbaren Infrastruktur durchfuehren.",
				Priority:    "high",
			},
			{
				Category:    "development",
				Name:        "Code Reviews und Secure Coding",
				Description: "Verpflichtende Code-Reviews fuer alle Aenderungen. SAST/DAST-Tools in die CI/CD-Pipeline integrieren. OWASP Top 10 als Mindeststandard.",
				Priority:    "high",
			},
			{
				Category:    "supply_chain",
				Name:        "Dependency Scanning",
				Description: "Automatisiertes Scanning aller Abhaengigkeiten (SBOM) auf bekannte Schwachstellen. Alerts bei kritischen CVEs. Regelmaessige Updates erzwingen.",
				Priority:    "high",
			},
			{
				Category:    "incident_response",
				Name:        "Incident Response Plan",
				Description: "Dokumentierter Incident-Response-Prozess mit definierten Eskalationsstufen, Meldepflichten (72h DSGVO) und regelmaessigen Uebungen (Tabletop Exercises).",
				Priority:    "critical",
			},
		},
		RiskScenarios: []RiskScenario{
			{
				Name:        "Datenleck durch Cloud-Fehlkonfiguration",
				Description: "Oeffentlich zugaengliche S3-Buckets, fehlende Netzwerk-Segmentierung oder falsch konfigurierte Firewalls legen Kundendaten offen.",
				Likelihood:  "high",
				Impact:      "critical",
				Mitigation:  "Infrastructure-as-Code mit automatisierten Compliance-Checks (z.B. Checkov, tfsec), Cloud Security Posture Management (CSPM) einsetzen, regelmaessige Audits der Cloud-Konfiguration.",
			},
			{
				Name:        "Supply-Chain-Angriff",
				Description: "Kompromittierte Abhaengigkeit (npm, PyPI, Go-Module) schleust Schadcode in den Build-Prozess ein und gelangt in die Produktionsumgebung.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "Dependency Pinning, Signaturpruefung, SBOM-Generierung, private Registries, regelmaessige Audits aller Drittanbieter-Komponenten.",
			},
			{
				Name:        "KI-Bias und Diskriminierung",
				Description: "KI-Modelle produzieren diskriminierende Ergebnisse aufgrund verzerrter Trainingsdaten. Verstoss gegen AI Act und Gleichbehandlungsgrundsaetze.",
				Likelihood:  "medium",
				Impact:      "high",
				Mitigation:  "Bias-Audits vor und nach Deployment, diverse Trainingsdaten, Erklaerbarkeits-Dokumentation gemaess AI Act, menschliche Ueberpruefung (Human-in-the-Loop).",
			},
			{
				Name:        "Insider-Bedrohung",
				Description: "Ein Mitarbeiter mit privilegiertem Zugang exfiltriert Kundendaten, Quellcode oder Geschaeftsgeheimnisse — absichtlich oder durch Social Engineering.",
				Likelihood:  "low",
				Impact:      "critical",
				Mitigation:  "Least-Privilege-Prinzip, privilegierte Zugangssteuerung (PAM), Audit-Logging aller Admin-Aktionen, Vier-Augen-Prinzip fuer kritische Operationen, Security-Awareness-Trainings.",
			},
		},
	}
}
// ============================================================================
// Gesundheitswesen
// ============================================================================
// healthcareTemplate builds the static compliance package for medical
// practices, hospitals, labs and health-IT, with emphasis on Art. 9 DSGVO
// special-category data. German umlauts in the data are ASCII-transliterated
// (ue/ae/oe/ss), matching the other templates.
func healthcareTemplate() IndustryTemplate {
	return IndustryTemplate{
		Slug:        "healthcare",
		Name:        "Gesundheitswesen",
		Description: "Compliance-Paket fuer Arztpraxen, Krankenhaeuser, Labore und Gesundheits-IT mit besonderem Fokus auf Art. 9 DSGVO (besondere Datenkategorien) und Patientendatenschutz.",
		Icon:        "\U0001F3E5",
		Regulations: []string{"DSGVO", "BDSG \u00a722", "SGB V", "MDR", "DiGAV"},
		VVTTemplates: []VVTTemplate{
			{
				Name:            "Patientenakte (ePA)",
				Purpose:         "Fuehrung elektronischer Patientenakten zur medizinischen Dokumentation, Behandlungsplanung und abrechnungstechnischen Erfassung.",
				LegalBasis:      "Art. 9 Abs. 2 lit. h DSGVO i.V.m. \u00a722 BDSG, \u00a7630f BGB (Dokumentationspflicht)",
				DataCategories:  []string{"Diagnosen", "Befunde", "Medikation", "Vitalwerte", "Anamnese", "Stammdaten", "Versicherungsdaten"},
				DataSubjects:    []string{"Patienten"},
				RetentionPeriod: "10 Jahre nach Abschluss der Behandlung (\u00a7630f BGB), bei Strahlentherapie 30 Jahre",
			},
			{
				Name:            "Terminverwaltung",
				Purpose:         "Planung, Vergabe und Erinnerung von Behandlungsterminen einschliesslich Online-Terminbuchung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), Art. 9 Abs. 2 lit. h DSGVO",
				DataCategories:  []string{"Name", "Kontaktdaten", "Terminzeitpunkt", "Fachrichtung/Behandlungsgrund", "Versicherungsstatus"},
				DataSubjects:    []string{"Patienten", "Angehoerige (bei Terminerstellung fuer Dritte)"},
				RetentionPeriod: "Vergangene Termine: 1 Jahr, bei medizinischer Relevanz gemaess Patientenakte",
			},
			{
				Name:            "Labor- und Befunddaten",
				Purpose:         "Erfassung, Uebermittlung und Archivierung von Laborergebnissen, bildgebenden Befunden und pathologischen Berichten.",
				LegalBasis:      "Art. 9 Abs. 2 lit. h DSGVO, \u00a710 MBO-Ae",
				DataCategories:  []string{"Laborwerte", "Bildgebung (DICOM)", "Pathologiebefunde", "Mikrobiologische Ergebnisse", "Genetische Daten"},
				DataSubjects:    []string{"Patienten"},
				RetentionPeriod: "10 Jahre, genetische Daten 30 Jahre",
			},
			{
				Name:            "Telemedizin",
				Purpose:         "Durchfuehrung von Videosprechstunden und telemedizinischen Konsultationen einschliesslich Uebertragung medizinischer Daten.",
				LegalBasis:      "Art. 9 Abs. 2 lit. h DSGVO, \u00a7630a BGB, Fernbehandlungs-Richtlinien",
				DataCategories:  []string{"Audio-/Videodaten", "Chatprotokolle", "Uebermittelte Dokumente", "Verbindungsmetadaten", "Behandlungsnotizen"},
				DataSubjects:    []string{"Patienten", "Behandelnde Aerzte"},
				RetentionPeriod: "Aufzeichnungen gemaess Patientenakte (10 Jahre), Verbindungsdaten 90 Tage",
			},
			{
				Name:            "Forschungsdaten",
				Purpose:         "Verwendung pseudonymisierter oder anonymisierter Patientendaten fuer klinische Studien und medizinische Forschung.",
				LegalBasis:      "Art. 9 Abs. 2 lit. j DSGVO, \u00a727 BDSG, ggf. Einwilligung gemaess Art. 9 Abs. 2 lit. a",
				DataCategories:  []string{"Pseudonymisierte Diagnosen", "Behandlungsverlaeufe", "Demografische Daten", "Genetische Daten (anonymisiert)", "Studienergebnisse"},
				DataSubjects:    []string{"Studienteilnehmer", "Patienten (retrospektiv, pseudonymisiert)"},
				RetentionPeriod: "Studienende + 15 Jahre (GCP-ICH), Forschungsdaten gemaess Foerderrichtlinien",
			},
			{
				Name:            "Abrechnung (KV/Krankenversicherung)",
				Purpose:         "Erstellung und Uebermittlung von Abrechnungsdaten an Kassenaerztliche Vereinigungen und Krankenkassen.",
				LegalBasis:      "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a7284 SGB V, \u00a7295 SGB V",
				DataCategories:  []string{"Versichertennummer", "Diagnose-Codes (ICD-10)", "Leistungsziffern (EBM/GOAe)", "Behandlungsdaten", "Zuzahlungsstatus"},
				DataSubjects:    []string{"Patienten", "Versicherte"},
				RetentionPeriod: "10 Jahre (steuerrechtlich), Abrechnungsdaten 4 Jahre (\u00a7305 SGB V)",
			},
		},
		TOMRecommendations: []TOMRecommendation{
			{
				Category:    "encryption",
				Name:        "Ende-zu-Ende-Verschluesselung",
				Description: "Saemtliche Kommunikation mit Gesundheitsdaten (E-Mail, Telemedizin, Befunduebermittlung) Ende-zu-Ende verschluesseln. Zertifizierte Loesungen gemaess gematik-Spezifikation einsetzen.",
				Priority:    "critical",
			},
			{
				Category:    "access_control",
				Name:        "Rollenbasierte Zugriffskontrolle (RBAC)",
				Description: "Feingranulare Zugriffsrechte basierend auf Behandlungskontext: Nur behandelnde Aerzte sehen relevante Patientendaten. Need-to-know-Prinzip konsequent umsetzen.",
				Priority:    "critical",
			},
			{
				Category:    "monitoring",
				Name:        "Audit-Logging",
				Description: "Lueckenloses Protokollieren aller Zugriffe auf Patientendaten mit Zeitstempel, Benutzer, Aktion und Begruendung. Logs manipulationssicher speichern (WORM).",
				Priority:    "critical",
			},
			{
				Category:    "physical_security",
				Name:        "Physische Sicherheit",
				Description: "Zutrittskontrolle zu Serverraeumen und medizinischen Arbeitsbereichen. Bildschirmsperren, Clean-Desk-Policy. Sicherer Umgang mit physischen Patientenakten.",
				Priority:    "high",
			},
			{
				Category:    "data_minimization",
				Name:        "Pseudonymisierung",
				Description: "Konsequente Pseudonymisierung bei Datenweitergabe (Forschung, Qualitaetssicherung, Abrechnung). Zuordnungstabellen separat und besonders geschuetzt speichern.",
				Priority:    "high",
			},
		},
		RiskScenarios: []RiskScenario{
			{
				Name:        "Unbefugter Zugriff auf Patientendaten",
				Description: "Mitarbeiter ohne Behandlungsbezug greifen auf Patientenakten zu (z.B. prominente Patienten). Verstoss gegen aerztliche Schweigepflicht und DSGVO.",
				Likelihood:  "high",
				Impact:      "critical",
				Mitigation:  "Striktes RBAC mit Behandlungskontext-Pruefung, automatische Anomalie-Erkennung bei ungewoehnlichen Zugriffen, regelmaessige Audit-Log-Auswertung, Sanktionskatalog.",
			},
			{
				Name:        "Ransomware-Angriff auf Krankenhaus-IT",
				Description: "Verschluesselungstrojaner legt Krankenhaus-Informationssystem lahm. Patientenversorgung gefaehrdet, Notbetrieb erforderlich.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "Netzwerksegmentierung (Medizingeraete, Verwaltung, Gaeste), Offline-Backups, Notfallplaene fuer Papierbetrieb, regelmaessige Sicherheitsupdates, Mitarbeiterschulung gegen Phishing.",
			},
			{
				Name:        "Datenverlust bei Systemausfall",
				Description: "Hardware-Defekt oder Softwarefehler fuehrt zum Verlust aktueller Patientendaten, Befunde oder Medikationsplaene.",
				Likelihood:  "medium",
				Impact:      "high",
				Mitigation:  "Redundante Systeme (Clustering), automatische Backups mit verifizierter Wiederherstellung, unterbrechungsfreie Stromversorgung (USV), Disaster-Recovery-Plan mit RTOs unter 4 Stunden.",
			},
			{
				Name:        "Verletzung der aerztlichen Schweigepflicht",
				Description: "Versehentliche oder vorsaetzliche Weitergabe von Patientendaten an Unberechtigte (z.B. Angehoerige ohne Vollmacht, Arbeitgeber, Medien).",
				Likelihood:  "medium",
				Impact:      "high",
				Mitigation:  "Schulungen zur Schweigepflicht (\u00a7203 StGB), klare Prozesse fuer Auskunftsersuchen, Dokumentation von Einwilligungen und Vollmachten, sichere Kommunikationskanaele.",
			},
		},
	}
}
// ============================================================================
// Finanzdienstleister
// ============================================================================
// financeTemplate builds the static compliance package for banks, insurers,
// payment providers and FinTechs (BaFin requirements, PSD2, AML). German
// umlauts in the data are ASCII-transliterated (ue/ae/oe/ss); the section
// sign is the \u00a7 escape (a previous revision had a mis-encoded \u00a3
// pound sign in the \u00a725h KWG citation).
func financeTemplate() IndustryTemplate {
	return IndustryTemplate{
		Slug:        "finance",
		Name:        "Finanzdienstleister",
		Description: "Compliance-Paket fuer Banken, Versicherungen, Zahlungsdienstleister und FinTechs mit Fokus auf BaFin-Anforderungen, PSD2 und Geldwaeschepraevention.",
		Icon:        "\U0001F3E6",
		Regulations: []string{"DSGVO", "KWG", "ZAG", "GwG", "MaRisk", "BAIT/DORA", "PSD2"},
		VVTTemplates: []VVTTemplate{
			{
				Name:            "Kontoeroeffnung / KYC",
				Purpose:         "Identitaetspruefung und Legitimation von Neukunden im Rahmen der Know-Your-Customer-Pflichten gemaess Geldwaeschegesetz.",
				LegalBasis:      "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a710 GwG, \u00a7154 AO",
				DataCategories:  []string{"Personalausweisdaten", "Adressdaten", "Geburtsdatum", "Staatsangehoerigkeit", "PEP-Status", "Wirtschaftliche Berechtigung", "Video-Identifikation"},
				DataSubjects:    []string{"Neukunden", "Wirtschaftlich Berechtigte", "Vertretungsberechtigte"},
				RetentionPeriod: "5 Jahre nach Ende der Geschaeftsbeziehung (\u00a78 GwG), Identifizierungsdaten 10 Jahre",
			},
			{
				Name:            "Zahlungsverarbeitung",
				Purpose:         "Ausfuehrung und Dokumentation von Zahlungstransaktionen (Ueberweisungen, Lastschriften, Kartenzahlungen) im Rahmen der Kontovertragserfullung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), \u00a7675f BGB, PSD2",
				DataCategories:  []string{"IBAN/Kontonummer", "Transaktionsbetrag", "Verwendungszweck", "Empfaengerdaten", "Zeitstempel", "Autorisierungsdaten"},
				DataSubjects:    []string{"Kontoinhaber", "Zahlungsempfaenger", "Zahlungspflichtige"},
				RetentionPeriod: "10 Jahre (\u00a7257 HGB, \u00a7147 AO)",
			},
			{
				Name:            "Kreditpruefung / Scoring",
				Purpose:         "Bonitaetspruefung und Kreditwuerdigkeitsbewertung auf Basis interner und externer Daten zur Kreditentscheidung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (vorvertragliche Massnahmen), \u00a731 BDSG (Scoring)",
				DataCategories:  []string{"Einkommensnachweise", "Schufa-Score", "Beschaeftigungsstatus", "Bestehende Verbindlichkeiten", "Sicherheiten", "Scoring-Ergebnis"},
				DataSubjects:    []string{"Kreditantragsteller", "Buergen", "Mithaftende"},
				RetentionPeriod: "Kreditlaufzeit + 3 Jahre, bei Ablehnung 6 Monate",
			},
			{
				Name:            "Wertpapierhandel",
				Purpose:         "Ausfuehrung und Dokumentation von Wertpapiergeschaeften, Anlageberatung und Geeignetheitspruefung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO, \u00a763 WpHG (Aufzeichnungspflichten), MiFID II",
				DataCategories:  []string{"Depotdaten", "Orderdaten", "Risikoprofil", "Anlageerfahrung", "Geeignetheitserklaerung", "Telefonaufzeichnungen"},
				DataSubjects:    []string{"Depotinhaber", "Bevollmaechtigte", "Anlageberater"},
				RetentionPeriod: "10 Jahre (\u00a7257 HGB), Telefonaufzeichnungen 5 Jahre (MiFID II)",
			},
			{
				Name:            "Geldwaesche-Monitoring",
				Purpose:         "Kontinuierliche Ueberwachung von Transaktionsmustern zur Erkennung verdaechtiger Aktivitaeten und Erfuellung der Meldepflichten gegenueber der FIU.",
				LegalBasis:      "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a725h KWG, \u00a756 GwG",
				DataCategories:  []string{"Transaktionshistorie", "Risikobewertung", "Verdachtsmeldungen (SAR)", "PEP-Screening-Ergebnisse", "Sanktionslistenabgleich"},
				DataSubjects:    []string{"Kunden", "Transaktionspartner", "Verdachtspersonen"},
				RetentionPeriod: "5 Jahre nach Ende der Geschaeftsbeziehung (\u00a78 GwG), Verdachtsmeldungen 10 Jahre",
			},
			{
				Name:            "Versicherungsantraege",
				Purpose:         "Verarbeitung von Antrags- und Risikodaten zur Pruefung, Annahme und Verwaltung von Versicherungsvertraegen.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), bei Gesundheitsdaten Art. 9 Abs. 2 lit. f DSGVO",
				DataCategories:  []string{"Antragsdaten", "Gesundheitsfragen", "Schadenhistorie", "Risikofaktoren", "Praemienberechnung", "Leistungsansprueche"},
				DataSubjects:    []string{"Versicherungsnehmer", "Versicherte Personen", "Bezugsberechtigte", "Geschaedigte"},
				RetentionPeriod: "Vertragsdauer + 10 Jahre (Verjaehrung), Lebensversicherung bis Ablauf aller Ansprueche",
			},
		},
		TOMRecommendations: []TOMRecommendation{
			{
				Category:    "encryption",
				Name:        "HSM fuer Schluesselverwaltung",
				Description: "Hardware Security Modules (HSM) fuer kryptographische Schluessel, insbesondere bei Zahlungsverkehr und digitalen Signaturen. PCI-DSS-konform.",
				Priority:    "critical",
			},
			{
				Category:    "monitoring",
				Name:        "Transaktionsmonitoring",
				Description: "Echtzeit-Ueberwachung aller Finanztransaktionen auf Anomalien, Betrugsversuche und verdaechtige Muster. Regelbasierte und KI-gestuetzte Erkennung.",
				Priority:    "critical",
			},
			{
				Category:    "access_control",
				Name:        "Vier-Augen-Prinzip",
				Description: "Kritische Transaktionen (Kreditfreigaben, Grossueberweisungen, Konfigurationsaenderungen) benoetigen Freigabe durch zwei unabhaengige Personen.",
				Priority:    "critical",
			},
			{
				Category:    "network_security",
				Name:        "DDoS-Schutz",
				Description: "Mehrstufiger DDoS-Schutz fuer Online-Banking und Zahlungsverkehr-Infrastruktur. Redundante Anbindung, Traffic-Scrubbing, automatische Skalierung.",
				Priority:    "high",
			},
			{
				Category:    "business_continuity",
				Name:        "Backup und Disaster Recovery",
				Description: "Taeglich gesicherte Datenbanken mit geografisch getrennter Aufbewahrung. RTO unter 2 Stunden fuer Kernbanksysteme, RPO unter 15 Minuten.",
				Priority:    "critical",
			},
			{
				Category:    "testing",
				Name:        "Penetration Testing (TIBER-EU)",
				Description: "Threat-Intelligence-basierte Red-Teaming-Tests gemaess TIBER-EU-Framework. Jaehrliche Durchfuehrung durch externe, BaFin-akkreditierte Tester.",
				Priority:    "high",
			},
		},
		RiskScenarios: []RiskScenario{
			{
				Name:        "Betrug und Identitaetsdiebstahl",
				Description: "Kriminelle nutzen gestohlene Identitaetsdaten zur Kontoeroeffnung, Kreditaufnahme oder fuer nicht autorisierte Transaktionen.",
				Likelihood:  "high",
				Impact:      "high",
				Mitigation:  "Starke Kundenauthentifizierung (SCA) gemaess PSD2, Echtzeit-Betrugs-Scoring, Video-Ident mit Liveness-Detection, biometrische Verifikation, Transaktionslimits.",
			},
			{
				Name:        "Insiderhandel-Datenleck",
				Description: "Vorabinformationen ueber boersenrelevante Entscheidungen (M&A, Quartalsberichte) gelangen an Unberechtigte.",
				Likelihood:  "low",
				Impact:      "critical",
				Mitigation:  "Insiderverzeichnisse fuehren, Chinese Walls zwischen Abteilungen, Kommunikations-Monitoring, Handelsverbote fuer Insider, regelmaessige Compliance-Schulungen.",
			},
			{
				Name:        "Systemausfall bei Zahlungsverkehr",
				Description: "Ausfall des Kernbanksystems oder der Zahlungsverkehrsinfrastruktur fuehrt zu Nicht-Verfuegbarkeit von Transaktionen, Geldautomaten und Online-Banking.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "Hochverfuegbarkeits-Architektur (Active-Active), automatischer Failover, regelmaessige Disaster-Recovery-Tests, Notfall-Kommunikationsplan fuer Kunden und BaFin.",
			},
			{
				Name:        "Geldwaesche-Compliance-Verstoss",
				Description: "Mangelhafte KYC-Prozesse oder unzureichendes Transaktionsmonitoring fuehren zu einem Compliance-Verstoss mit BaFin-Sanktionen.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "Automatisiertes Transaction-Monitoring mit regelmaessiger Kalibrierung, jaehrliche GwG-Schulungen, interne Revision der AML-Prozesse, PEP- und Sanktionslisten-Screening in Echtzeit.",
			},
		},
	}
}
// ============================================================================
// Produktion / Industrie
// ============================================================================
// manufacturingTemplate builds the static compliance package for producing
// companies (NIS2, OT security, IoT, industrial control systems). German
// umlauts in the data are ASCII-transliterated (ue/ae/oe/ss), matching the
// other templates.
func manufacturingTemplate() IndustryTemplate {
	return IndustryTemplate{
		Slug:        "manufacturing",
		Name:        "Produktion / Industrie",
		Description: "Compliance-Paket fuer produzierende Unternehmen mit Fokus auf NIS2-Anforderungen, OT-Security, IoT-Sicherheit und Schutz industrieller Steuerungssysteme.",
		Icon:        "\U0001F3ED",
		Regulations: []string{"DSGVO", "NIS2", "Maschinenverordnung", "BetrSichV", "IT-Sicherheitsgesetz 2.0"},
		VVTTemplates: []VVTTemplate{
			{
				Name:            "Mitarbeiterdaten / Zeiterfassung",
				Purpose:         "Erfassung von Arbeitszeiten, Schichtplanung und Anwesenheitsdaten zur Lohnabrechnung und Einhaltung des Arbeitszeitgesetzes.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), \u00a726 BDSG, \u00a716 ArbZG",
				DataCategories:  []string{"Mitarbeiterstammdaten", "Arbeitszeitdaten", "Schichtplaene", "Fehlzeiten", "Ueberstunden", "Zutrittsdaten"},
				DataSubjects:    []string{"Mitarbeiter", "Leiharbeiter", "Praktikanten"},
				RetentionPeriod: "Lohnunterlagen 6 Jahre (\u00a7257 HGB), Arbeitszeitnachweise 2 Jahre (\u00a716 ArbZG)",
			},
			{
				Name:            "Lieferantenmanagement",
				Purpose:         "Verwaltung von Lieferantendaten, Bestellprozessen und Qualitaetsbewertungen im Rahmen der Supply-Chain.",
				LegalBasis:      "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfuellung), Art. 6 Abs. 1 lit. f (berechtigtes Interesse)",
				DataCategories:  []string{"Ansprechpartner", "Kontaktdaten", "Lieferkonditionen", "Qualitaetsbewertungen", "Zertifizierungen", "Bankverbindungen"},
				DataSubjects:    []string{"Ansprechpartner der Lieferanten", "Subunternehmer"},
				RetentionPeriod: "Vertragsdauer + 10 Jahre (Gewaehrleistung und Steuerrecht)",
			},
			{
				Name:            "IoT-Sensordaten",
				Purpose:         "Erfassung und Auswertung von Sensor- und Maschinendaten fuer Produktionsoptimierung, Predictive Maintenance und Qualitaetssicherung.",
				LegalBasis:      "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), bei Personenbezug ggf. Art. 6 Abs. 1 lit. a (Einwilligung)",
				DataCategories:  []string{"Maschinenkennung", "Temperatur/Druck/Vibration", "Produktionszaehler", "Energieverbrauch", "Standortdaten (Intralogistik)", "Bediener-ID (falls zugeordnet)"},
				DataSubjects:    []string{"Maschinenbediener (indirekt)", "Instandhalter"},
				RetentionPeriod: "Rohdaten 1 Jahr, aggregierte Daten 5 Jahre, qualitaetsrelevant 10 Jahre",
			},
			{
				Name:            "Qualitaetskontrolle",
				Purpose:         "Dokumentation von Qualitaetspruefungen, Chargenrueckverfolgbarkeit und Reklamationsmanagement.",
				LegalBasis:      "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), Maschinenverordnung, Produkthaftung",
				DataCategories:  []string{"Pruefprotokolle", "Chargennummern", "Messwerte", "Pruefer-ID", "Fotos/Videos der Pruefung", "Reklamationsdaten"},
				DataSubjects:    []string{"Pruefer", "Reklamierende Kunden"},
				RetentionPeriod: "Produktlebensdauer + 10 Jahre (Produkthaftung), sicherheitskritisch 30 Jahre",
			},
			{
				Name:            "Videoueberwachung",
				Purpose:         "Ueberwachung von Produktionshallen, Lagerbereichen und Aussenbereichen zum Schutz vor Diebstahl, Sabotage und zur Arbeitssicherheit.",
				LegalBasis:      "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), Betriebsvereinbarung",
				DataCategories:  []string{"Videoaufnahmen", "Zeitstempel", "Kamerastandort", "Bewegungserkennung"},
				DataSubjects:    []string{"Mitarbeiter", "Besucher", "Lieferanten", "Unbefugte"},
				RetentionPeriod: "72 Stunden Standard, bei Vorfaellen bis Abschluss der Ermittlung (max. 10 Tage ohne konkreten Anlass)",
			},
			{
				Name:            "Zugangskontrolle (physisch und logisch)",
				Purpose:         "Steuerung und Protokollierung des Zutritts zu Produktionsbereichen, Gefahrstofflagern und IT-Raeumen mittels Chipkarten/Biometrie.",
				LegalBasis:      "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), BetrSichV, bei Biometrie Art. 9 Abs. 2 lit. b DSGVO",
				DataCategories:  []string{"Mitarbeiter-ID", "Zutrittszeitpunkt", "Zutrittsbereich", "Chipkartennummer", "Biometrische Daten (optional)"},
				DataSubjects:    []string{"Mitarbeiter", "Externe Dienstleister", "Besucher"},
				RetentionPeriod: "Zutrittsprotokolle 90 Tage, sicherheitsrelevante Bereiche 1 Jahr",
			},
		},
		TOMRecommendations: []TOMRecommendation{
			{
				Category:    "network_security",
				Name:        "Netzwerksegmentierung (IT/OT)",
				Description: "Strikte Trennung von Office-IT und Operational Technology (OT) durch DMZ, Firewalls und unidirektionale Gateways. Purdue-Modell als Referenzarchitektur.",
				Priority:    "critical",
			},
			{
				Category:    "patch_management",
				Name:        "IoT-Patch-Management",
				Description: "Zentrales Management aller IoT-Geraete und Firmware-Versionen. Geplante Wartungsfenster fuer Updates, Risikobewertung vor Patches auf Produktionssystemen.",
				Priority:    "high",
			},
			{
				Category:    "physical_security",
				Name:        "Physische Zutrittskontrolle",
				Description: "Mehrstufiges Zutrittskonzept (Gelaende, Gebaeude, Produktionshalle, Leitstand). Besuchermanagement, Begleitung in Sicherheitsbereichen, Videoprotokollierung.",
				Priority:    "high",
			},
			{
				Category:    "business_continuity",
				Name:        "Backup industrieller Steuerungen",
				Description: "Regelmaessige Sicherung von SPS-Programmen, SCADA-Konfigurationen und Roboterprogrammen. Offline-Aufbewahrung der Backups, dokumentierte Restore-Prozeduren.",
				Priority:    "critical",
			},
			{
				Category:    "incident_response",
				Name:        "Notfallplaene fuer Produktionsausfall",
				Description: "Dokumentierte Notfallplaene fuer Cyber-Angriffe auf OT-Systeme. Manuelle Rueckfallebenen, Kommunikationsketten, Kontakt zu BSI und CERT. Jaehrliche Uebungen.",
				Priority:    "critical",
			},
		},
		RiskScenarios: []RiskScenario{
			{
				Name:        "OT-Cyberangriff auf Produktionsanlage",
				Description: "Angreifer kompromittiert SCADA/SPS-Systeme und manipuliert Produktionsprozesse. Moegliche Folgen: Produktionsausfall, Qualitaetsmaengel, Personengefaehrdung.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "Netzwerksegmentierung (IT/OT), Anomalie-Erkennung im OT-Netzwerk, Haertung der Steuerungssysteme, Deaktivierung nicht benoetigter Dienste und Ports, regelmaessige Sicherheitsaudits.",
			},
			{
				Name:        "Ausfall der Lieferkette durch Cybervorfall",
				Description: "Ein Cyberangriff auf einen kritischen Zulieferer fuehrt zum Stillstand der eigenen Produktion mangels Materialverfuegbarkeit oder kompromittierter Daten.",
				Likelihood:  "medium",
				Impact:      "high",
				Mitigation:  "Diversifikation der Lieferantenbasis, vertragliche Cybersecurity-Anforderungen an Zulieferer, regelmaessige Risikobewertung der Supply Chain, Notfallbestaende fuer kritische Komponenten.",
			},
			{
				Name:        "Industriespionage",
				Description: "Wettbewerber oder staatliche Akteure greifen Konstruktionsdaten, Fertigungsverfahren oder strategische Planungen ab.",
				Likelihood:  "medium",
				Impact:      "critical",
				Mitigation:  "DLP-Loesungen (Data Loss Prevention), Verschluesselung von CAD/CAM-Daten, Geheimhaltungsvereinbarungen, Informationsklassifizierung, USB-Port-Kontrolle, Mitarbeiter-Sensibilisierung.",
			},
			{
				Name:        "IoT-Botnet-Kompromittierung",
				Description: "Ungepatchte IoT-Sensoren und Aktoren werden Teil eines Botnets und dienen als Angriffsinfrastruktur oder Einfallstor ins Unternehmensnetz.",
				Likelihood:  "high",
				Impact:      "high",
				Mitigation:  "Default-Passwoerter aendern, Firmware-Updates automatisieren, IoT-Geraete in eigenem VLAN isolieren, Netzwerk-Traffic-Monitoring, Geraete-Inventar fuehren, unsichere Geraete ersetzen.",
			},
		},
	}
}

View File

@@ -1,77 +0,0 @@
package multitenant
import (
"time"
"github.com/google/uuid"
)
// TenantOverview provides a consolidated view of a tenant's compliance status
// including scores, module highlights, and namespace information. Fields not
// resolvable for a tenant are left at their zero values (see Store.GetOverview).
type TenantOverview struct {
	ID              uuid.UUID `json:"id"`
	Name            string    `json:"name"`
	Slug            string    `json:"slug"`
	Status          string    `json:"status"`
	MaxUsers        int       `json:"max_users"`
	LLMQuotaMonthly int       `json:"llm_quota_monthly"`
	ComplianceScore int       `json:"compliance_score"` // sourced from the reporting store
	RiskLevel       string    `json:"risk_level"`
	NamespaceCount  int       `json:"namespace_count"`
	// Module highlights (per-module counts gathered via direct SQL queries)
	OpenIncidents int     `json:"open_incidents"`
	OpenReports   int     `json:"open_reports"` // whistleblower
	PendingDSRs   int     `json:"pending_dsrs"` // NOTE(review): presumably data-subject requests — confirm
	TrainingRate  float64 `json:"training_completion_rate"`
	VendorRiskHigh int `json:"vendor_risk_high"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
// MultiTenantOverviewResponse wraps the list of tenant overviews with aggregate metrics.
// AverageScore is the truncating integer mean of the tenants' compliance
// scores (0 when there are no tenants); GeneratedAt is set to UTC at build time.
type MultiTenantOverviewResponse struct {
	Tenants      []TenantOverview `json:"tenants"`
	Total        int              `json:"total"`
	AverageScore int              `json:"average_score"`
	GeneratedAt  time.Time        `json:"generated_at"`
}
// CreateTenantRequest represents a request to create a new tenant.
// Name and Slug are mandatory (enforced via the binding tag); MaxUsers and
// LLMQuotaMonthly default to 0 when omitted from the JSON payload.
type CreateTenantRequest struct {
	Name            string `json:"name" binding:"required"`
	Slug            string `json:"slug" binding:"required"`
	MaxUsers        int    `json:"max_users"`
	LLMQuotaMonthly int    `json:"llm_quota_monthly"`
}
// UpdateTenantRequest represents a partial update to an existing tenant.
// Pointer fields allow distinguishing between "not provided" (nil) and
// an explicit zero value; only non-nil fields should be applied.
type UpdateTenantRequest struct {
	Name            *string `json:"name"`
	MaxUsers        *int    `json:"max_users"`
	LLMQuotaMonthly *int    `json:"llm_quota_monthly"`
	Status          *string `json:"status"`
}
// CreateNamespaceRequest represents a request to create a new namespace within a tenant.
// Name and Slug are mandatory; IsolationLevel and DataClassification are
// optional labels (no validation of their values is visible in this file —
// NOTE(review): confirm accepted values against the handler/store layer).
type CreateNamespaceRequest struct {
	Name               string `json:"name" binding:"required"`
	Slug               string `json:"slug" binding:"required"`
	IsolationLevel     string `json:"isolation_level"`
	DataClassification string `json:"data_classification"`
}
// SwitchTenantRequest represents a request to switch the active tenant context.
// TenantID is transported as a string; presumably parsed to a uuid.UUID by
// the handler — confirm against the handler layer.
type SwitchTenantRequest struct {
	TenantID string `json:"tenant_id" binding:"required"`
}
// SwitchTenantResponse contains the tenant info needed for the frontend to
// switch context after a successful tenant switch.
type SwitchTenantResponse struct {
	TenantID   uuid.UUID `json:"tenant_id"`
	TenantName string    `json:"tenant_name"`
	TenantSlug string    `json:"tenant_slug"`
	Status     string    `json:"status"`
}

View File

@@ -1,148 +0,0 @@
package multitenant
import (
"context"
"fmt"
"log"
"time"
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
"github.com/google/uuid"
"github.com/jackc/pgx/v5/pgxpool"
)
// Store provides aggregated multi-tenant views by combining data from the
// existing RBAC store, reporting store, and direct SQL queries for module highlights.
type Store struct {
	pool           *pgxpool.Pool    // direct SQL access for per-module highlight counts
	rbacStore      *rbac.Store      // tenant listing and lookup (ListTenants, GetTenant)
	reportingStore *reporting.Store // compliance-score source
}
// NewStore constructs a multi-tenant store backed by the given connection
// pool together with the existing RBAC and reporting stores.
func NewStore(pool *pgxpool.Pool, rbacStore *rbac.Store, reportingStore *reporting.Store) *Store {
	s := &Store{
		pool:           pool,
		rbacStore:      rbacStore,
		reportingStore: reportingStore,
	}
	return s
}
// GetOverview retrieves all tenants with their compliance scores and module highlights.
// It aggregates data from the RBAC tenant list, the reporting compliance score,
// and direct SQL counts for namespaces, incidents, reports, DSRs, training, and vendors.
// Individual query failures are tolerated and result in zero-value defaults.
func (s *Store) GetOverview(ctx context.Context) (*MultiTenantOverviewResponse, error) {
	tenants, err := s.rbacStore.ListTenants(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to list tenants: %w", err)
	}
	items := make([]TenantOverview, 0, len(tenants))
	for _, t := range tenants {
		items = append(items, s.buildTenantOverview(ctx, t))
	}
	// Integer average of all per-tenant compliance scores (0 when no tenants).
	avg := 0
	if n := len(items); n > 0 {
		sum := 0
		for i := range items {
			sum += items[i].ComplianceScore
		}
		avg = sum / n
	}
	resp := &MultiTenantOverviewResponse{
		Tenants:      items,
		Total:        len(items),
		AverageScore: avg,
		GeneratedAt:  time.Now().UTC(),
	}
	return resp, nil
}
// GetTenantDetail returns detailed compliance info for a specific tenant.
func (s *Store) GetTenantDetail(ctx context.Context, tenantID uuid.UUID) (*TenantOverview, error) {
	tenant, err := s.rbacStore.GetTenant(ctx, tenantID)
	if err != nil {
		return nil, fmt.Errorf("failed to get tenant: %w", err)
	}
	detail := s.buildTenantOverview(ctx, tenant)
	return &detail, nil
}
// buildTenantOverview constructs a TenantOverview by fetching compliance scores
// and module highlights for a single tenant. Errors are logged but do not
// propagate -- missing data defaults to zero values, so a partially failing
// tenant still appears in the aggregated overview.
func (s *Store) buildTenantOverview(ctx context.Context, tenant *rbac.Tenant) TenantOverview {
	overview := TenantOverview{
		ID:              tenant.ID,
		Name:            tenant.Name,
		Slug:            tenant.Slug,
		Status:          string(tenant.Status),
		MaxUsers:        tenant.MaxUsers,
		LLMQuotaMonthly: tenant.LLMQuotaMonthly,
		CreatedAt:       tenant.CreatedAt,
		UpdatedAt:       tenant.UpdatedAt,
	}
	// Compliance score and risk level derived from an executive report.
	// GenerateReport computes the compliance score and risk overview internally.
	// On failure the score/level stay at their zero values and we only log.
	report, err := s.reportingStore.GenerateReport(ctx, tenant.ID)
	if err != nil {
		log.Printf("multitenant: failed to generate report for tenant %s: %v", tenant.ID, err)
	} else {
		overview.ComplianceScore = report.ComplianceScore
		overview.RiskLevel = report.RiskOverview.OverallLevel
	}
	// Namespace count
	overview.NamespaceCount = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM compliance_namespaces WHERE tenant_id = $1")
	// Open incidents
	overview.OpenIncidents = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM incidents WHERE tenant_id = $1 AND status IN ('new', 'investigating', 'containment')")
	// Open whistleblower reports
	overview.OpenReports = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1 AND status IN ('new', 'acknowledged', 'investigating')")
	// Pending DSR requests
	overview.PendingDSRs = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM dsr_requests WHERE tenant_id = $1 AND status IN ('new', 'in_progress')")
	// Training completion rate (average progress, 0-100); completed
	// enrollments count as 100 regardless of their recorded progress.
	overview.TrainingRate = s.avgSafe(ctx, tenant.ID,
		"SELECT COALESCE(AVG(CASE WHEN status = 'completed' THEN 100.0 ELSE progress END), 0) FROM academy_enrollments WHERE tenant_id = $1")
	// High-risk vendors
	overview.VendorRiskHigh = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM vendors WHERE tenant_id = $1 AND risk_level = 'high'")
	return overview
}
// countSafe executes a COUNT(*) query that takes a single tenant_id parameter.
// If the query fails for any reason (e.g. the table does not exist), it
// returns 0 rather than propagating the error.
func (s *Store) countSafe(ctx context.Context, tenantID uuid.UUID, query string) int {
	var n int
	if err := s.pool.QueryRow(ctx, query, tenantID).Scan(&n); err != nil {
		// Best-effort: missing tables or query failures degrade to zero.
		return 0
	}
	return n
}
// avgSafe executes an AVG query that takes a single tenant_id parameter.
// Any failure is swallowed and reported as 0 (best-effort aggregation).
func (s *Store) avgSafe(ctx context.Context, tenantID uuid.UUID, query string) float64 {
	var result float64
	if err := s.pool.QueryRow(ctx, query, tenantID).Scan(&result); err != nil {
		return 0
	}
	return result
}

View File

@@ -1,97 +0,0 @@
package reporting
import "time"
// ExecutiveReport is the aggregated cross-module compliance report for one
// tenant, produced by Store.GenerateReport.
type ExecutiveReport struct {
	GeneratedAt     time.Time `json:"generated_at"`
	TenantID        string    `json:"tenant_id"`
	ComplianceScore int       `json:"compliance_score"` // 0-100 overall score
	// Module summaries
	DSGVO         DSGVOSummary         `json:"dsgvo"`
	Vendors       VendorSummary        `json:"vendors"`
	Incidents     IncidentSummary      `json:"incidents"`
	Whistleblower WhistleblowerSummary `json:"whistleblower"`
	Academy       AcademySummary       `json:"academy"`
	// Cross-module metrics
	RiskOverview      RiskOverview    `json:"risk_overview"`
	UpcomingDeadlines []Deadline      `json:"upcoming_deadlines"`
	RecentActivity    []ActivityEntry `json:"recent_activity"`
}

// DSGVOSummary aggregates GDPR (DSGVO) module metrics: processing
// activities, TOMs, data-subject requests, DSFAs, and retention policies.
type DSGVOSummary struct {
	ProcessingActivities int `json:"processing_activities"`
	ActiveProcessings    int `json:"active_processings"`
	TOMsImplemented      int `json:"toms_implemented"`
	TOMsPlanned          int `json:"toms_planned"`
	TOMsTotal            int `json:"toms_total"`
	CompletionPercent    int `json:"completion_percent"` // TOMsImplemented / total * 100
	OpenDSRs             int `json:"open_dsrs"`
	OverdueDSRs          int `json:"overdue_dsrs"`
	DSFAsCompleted       int `json:"dsfas_completed"`
	RetentionPolicies    int `json:"retention_policies"`
}

// VendorSummary aggregates vendor-compliance metrics.
type VendorSummary struct {
	TotalVendors     int            `json:"total_vendors"`
	ActiveVendors    int            `json:"active_vendors"`
	ByRiskLevel      map[string]int `json:"by_risk_level"` // keyed by risk level string, e.g. "HIGH"
	PendingReviews   int            `json:"pending_reviews"`
	ExpiredContracts int            `json:"expired_contracts"`
}

// IncidentSummary aggregates data-breach / incident metrics.
type IncidentSummary struct {
	TotalIncidents       int     `json:"total_incidents"`
	OpenIncidents        int     `json:"open_incidents"`
	CriticalIncidents    int     `json:"critical_incidents"`
	NotificationsPending int     `json:"notifications_pending"`
	AvgResolutionHours   float64 `json:"avg_resolution_hours"`
}

// WhistleblowerSummary aggregates whistleblower-report metrics.
type WhistleblowerSummary struct {
	TotalReports           int     `json:"total_reports"`
	OpenReports            int     `json:"open_reports"`
	OverdueAcknowledgments int     `json:"overdue_acknowledgments"`
	OverdueFeedbacks       int     `json:"overdue_feedbacks"`
	AvgResolutionDays      float64 `json:"avg_resolution_days"`
}

// AcademySummary aggregates compliance-training (academy) metrics.
type AcademySummary struct {
	TotalCourses      int     `json:"total_courses"`
	TotalEnrollments  int     `json:"total_enrollments"`
	CompletionRate    float64 `json:"completion_rate"` // 0-100
	OverdueCount      int     `json:"overdue_count"`
	AvgCompletionDays float64 `json:"avg_completion_days"`
}

// RiskOverview summarizes the per-module and overall risk derived from the
// module summaries above.
type RiskOverview struct {
	OverallLevel     string       `json:"overall_level"` // LOW, MEDIUM, HIGH, CRITICAL
	ModuleRisks      []ModuleRisk `json:"module_risks"`
	OpenFindings     int          `json:"open_findings"`
	CriticalFindings int          `json:"critical_findings"`
}

// ModuleRisk is the risk assessment for a single compliance module.
type ModuleRisk struct {
	Module string `json:"module"`
	Level  string `json:"level"` // LOW, MEDIUM, HIGH, CRITICAL
	Score  int    `json:"score"` // 0-100
	Issues int    `json:"issues"`
}

// Deadline is an upcoming (or overdue) compliance deadline surfaced on the
// executive report.
type Deadline struct {
	Module      string    `json:"module"`
	Type        string    `json:"type"`
	Description string    `json:"description"`
	DueDate     time.Time `json:"due_date"`
	DaysLeft    int       `json:"days_left"` // negative when overdue
	Severity    string    `json:"severity"`  // INFO, WARNING, URGENT, OVERDUE
}

// ActivityEntry is a single recent-activity item across modules.
type ActivityEntry struct {
	Timestamp   time.Time `json:"timestamp"`
	Module      string    `json:"module"`
	Action      string    `json:"action"`
	Description string    `json:"description"`
	UserID      string    `json:"user_id,omitempty"`
}

View File

@@ -1,516 +0,0 @@
package reporting
import (
"context"
"math"
"sort"
"time"
"github.com/breakpilot/ai-compliance-sdk/internal/academy"
"github.com/breakpilot/ai-compliance-sdk/internal/whistleblower"
"github.com/google/uuid"
"github.com/jackc/pgx/v5/pgxpool"
)
// Store aggregates cross-module compliance data into executive reports,
// combining the whistleblower and academy stores with direct SQL queries.
type Store struct {
	pool         *pgxpool.Pool       // direct SQL for DSGVO, vendor, and incident stats
	whistleStore *whistleblower.Store
	academyStore *academy.Store
}

// NewStore creates a new reporting store backed by the given pool and
// module stores.
func NewStore(pool *pgxpool.Pool, ws *whistleblower.Store, as *academy.Store) *Store {
	return &Store{
		pool:         pool,
		whistleStore: ws,
		academyStore: as,
	}
}
// GenerateReport builds the full executive compliance report for a tenant:
// it gathers per-module statistics, derives the risk overview and the
// weighted compliance score, and attaches upcoming deadlines and recent
// activity. Module statistics failures are tolerated -- the corresponding
// summary simply stays at its zero value.
func (s *Store) GenerateReport(ctx context.Context, tenantID uuid.UUID) (*ExecutiveReport, error) {
	report := &ExecutiveReport{
		GeneratedAt: time.Now().UTC(),
		TenantID:    tenantID.String(),
	}
	// 1. Gather DSGVO stats via direct SQL (Python is now primary for DSGVO)
	report.DSGVO = s.getDSGVOStats(ctx, tenantID)
	// 2. Gather vendor stats via direct SQL (Python is now primary for vendors)
	report.Vendors = s.getVendorStats(ctx, tenantID)
	// 3. Gather incident stats via direct SQL (Python is now primary for incidents)
	report.Incidents = s.getIncidentStats(ctx, tenantID)
	// 4. Gather whistleblower stats (errors ignored: summary stays zero-valued)
	whistleStats, err := s.whistleStore.GetStatistics(ctx, tenantID)
	if err == nil && whistleStats != nil {
		// "Open" = everything not yet closed or archived.
		openReports := 0
		for status, count := range whistleStats.ByStatus {
			if status != "CLOSED" && status != "ARCHIVED" {
				openReports += count
			}
		}
		report.Whistleblower = WhistleblowerSummary{
			TotalReports:           whistleStats.TotalReports,
			OpenReports:            openReports,
			OverdueAcknowledgments: whistleStats.OverdueAcknowledgments,
			OverdueFeedbacks:       whistleStats.OverdueFeedbacks,
			AvgResolutionDays:      whistleStats.AvgResolutionDays,
		}
	}
	// 5. Gather academy stats (errors ignored, as above)
	academyStats, err := s.academyStore.GetStatistics(ctx, tenantID)
	if err == nil && academyStats != nil {
		report.Academy = AcademySummary{
			TotalCourses:      academyStats.TotalCourses,
			TotalEnrollments:  academyStats.TotalEnrollments,
			CompletionRate:    academyStats.CompletionRate,
			OverdueCount:      academyStats.OverdueCount,
			AvgCompletionDays: academyStats.AvgCompletionDays,
		}
	}
	// 6. Calculate risk overview
	report.RiskOverview = s.calculateRiskOverview(report)
	// 7. Calculate compliance score (0-100)
	report.ComplianceScore = s.calculateComplianceScore(report)
	// 8. Gather upcoming deadlines from DB
	report.UpcomingDeadlines = s.getUpcomingDeadlines(ctx, tenantID)
	// 9. Gather recent activity from DB
	report.RecentActivity = s.getRecentActivity(ctx, tenantID)
	return report, nil
}
// getDSGVOStats queries DSGVO tables directly (previously via dsgvo.Store).
// All query errors are deliberately ignored (best-effort): a failing query
// leaves the corresponding fields at zero.
func (s *Store) getDSGVOStats(ctx context.Context, tenantID uuid.UUID) DSGVOSummary {
	summary := DSGVOSummary{}
	// Processing activities (total and ACTIVE)
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*), COUNT(*) FILTER (WHERE status = 'ACTIVE') FROM compliance.vvt_entries WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.ProcessingActivities, &summary.ActiveProcessings)
	// TOMs (technical and organizational measures), split by status
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FILTER (WHERE status = 'IMPLEMENTED'), COUNT(*) FILTER (WHERE status = 'PLANNED') FROM compliance.tom_entries WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.TOMsImplemented, &summary.TOMsPlanned)
	summary.TOMsTotal = summary.TOMsImplemented + summary.TOMsPlanned
	// Completion percent only makes sense when there is at least one TOM.
	if summary.TOMsTotal > 0 {
		summary.CompletionPercent = int(math.Round(float64(summary.TOMsImplemented) / float64(summary.TOMsTotal) * 100))
	}
	// DSRs: open = not completed/rejected; overdue = open and past deadline
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FILTER (WHERE status NOT IN ('COMPLETED','REJECTED')), COUNT(*) FILTER (WHERE deadline < NOW() AND status NOT IN ('COMPLETED','REJECTED')) FROM compliance.dsr_requests WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.OpenDSRs, &summary.OverdueDSRs)
	// DSFAs (data protection impact assessments), completed only
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM compliance.dsfa_entries WHERE tenant_id = $1 AND status = 'COMPLETED'`, tenantID,
	).Scan(&summary.DSFAsCompleted)
	// Retention policies
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM compliance.loeschfristen WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.RetentionPolicies)
	return summary
}
// getVendorStats queries vendor tables directly (previously via vendor.Store).
// Best-effort: query errors are ignored and leave fields at zero.
func (s *Store) getVendorStats(ctx context.Context, tenantID uuid.UUID) VendorSummary {
	summary := VendorSummary{ByRiskLevel: map[string]int{}}
	// Total and ACTIVE vendor counts
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*), COUNT(*) FILTER (WHERE status = 'ACTIVE') FROM compliance.vendor_compliance WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.TotalVendors, &summary.ActiveVendors)
	// Breakdown by risk level; NULL risk levels are bucketed as 'UNKNOWN'.
	rows, err := s.pool.Query(ctx,
		`SELECT COALESCE(risk_level, 'UNKNOWN'), COUNT(*) FROM compliance.vendor_compliance WHERE tenant_id = $1 GROUP BY risk_level`, tenantID,
	)
	if err == nil {
		defer rows.Close()
		for rows.Next() {
			var level string
			var count int
			// Scan errors skip the row; rows.Err() is intentionally not
			// checked (best-effort aggregation).
			if rows.Scan(&level, &count) == nil {
				summary.ByRiskLevel[level] = count
			}
		}
	}
	// Vendors whose next review date has already passed
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM compliance.vendor_compliance WHERE tenant_id = $1 AND next_review_date < NOW()`, tenantID,
	).Scan(&summary.PendingReviews)
	// Vendors whose contract has expired
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM compliance.vendor_compliance WHERE tenant_id = $1 AND contract_end < NOW()`, tenantID,
	).Scan(&summary.ExpiredContracts)
	return summary
}
// getIncidentStats queries incident tables directly (previously via incidents.Store).
// Best-effort: query errors are ignored and leave fields at zero.
func (s *Store) getIncidentStats(ctx context.Context, tenantID uuid.UUID) IncidentSummary {
	summary := IncidentSummary{}
	// Totals: all incidents, open ones, and open CRITICAL ones
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*), COUNT(*) FILTER (WHERE status NOT IN ('RESOLVED','CLOSED')), COUNT(*) FILTER (WHERE severity = 'CRITICAL' AND status NOT IN ('RESOLVED','CLOSED')) FROM compliance.incidents WHERE tenant_id = $1`, tenantID,
	).Scan(&summary.TotalIncidents, &summary.OpenIncidents, &summary.CriticalIncidents)
	// Regulator notifications that are required but not yet sent
	_ = s.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM compliance.incidents WHERE tenant_id = $1 AND notification_required = true AND notification_sent = false`, tenantID,
	).Scan(&summary.NotificationsPending)
	// Average resolution time in hours, over resolved incidents only
	_ = s.pool.QueryRow(ctx,
		`SELECT COALESCE(AVG(EXTRACT(EPOCH FROM (resolved_at - created_at))/3600), 0) FROM compliance.incidents WHERE tenant_id = $1 AND resolved_at IS NOT NULL`, tenantID,
	).Scan(&summary.AvgResolutionHours)
	return summary
}
// calculateRiskOverview derives a per-module risk score (0-100, higher is
// better) plus an overall level from the module summaries in the report.
// Each module applies its own penalty heuristics; all scores are clamped
// to a minimum of 0 before classification via riskLevel.
func (s *Store) calculateRiskOverview(report *ExecutiveReport) RiskOverview {
	modules := []ModuleRisk{}
	// DSGVO risk based on overdue DSRs and missing TOMs
	dsgvoScore := 100
	dsgvoIssues := report.DSGVO.OverdueDSRs + report.DSGVO.TOMsPlanned
	if report.DSGVO.OverdueDSRs > 0 {
		dsgvoScore -= report.DSGVO.OverdueDSRs * 15
	}
	// NOTE(review): when TOMsTotal > 0 the score is replaced wholesale by
	// CompletionPercent, discarding the overdue-DSR penalty applied above.
	// Confirm whether that is intended before changing it.
	if report.DSGVO.TOMsTotal > 0 {
		dsgvoScore = int(math.Round(float64(report.DSGVO.CompletionPercent)))
	}
	if dsgvoScore < 0 {
		dsgvoScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "DSGVO", Level: riskLevel(dsgvoScore), Score: dsgvoScore, Issues: dsgvoIssues})
	// Vendor risk based on high-risk vendors and pending reviews
	vendorScore := 100
	vendorIssues := report.Vendors.PendingReviews + report.Vendors.ExpiredContracts
	highRisk := 0
	if v, ok := report.Vendors.ByRiskLevel["HIGH"]; ok {
		highRisk += v
	}
	if v, ok := report.Vendors.ByRiskLevel["CRITICAL"]; ok {
		highRisk += v
	}
	// Base score: percentage of vendors NOT in the high/critical buckets.
	if report.Vendors.TotalVendors > 0 {
		vendorScore = 100 - int(math.Round(float64(highRisk)/float64(report.Vendors.TotalVendors)*100))
	}
	vendorScore -= report.Vendors.PendingReviews * 5
	vendorScore -= report.Vendors.ExpiredContracts * 10
	if vendorScore < 0 {
		vendorScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Vendors", Level: riskLevel(vendorScore), Score: vendorScore, Issues: vendorIssues})
	// Incident risk: critical incidents weigh heaviest, then pending
	// notifications, then open incidents.
	incidentScore := 100
	incidentIssues := report.Incidents.OpenIncidents
	incidentScore -= report.Incidents.CriticalIncidents * 20
	incidentScore -= report.Incidents.OpenIncidents * 5
	incidentScore -= report.Incidents.NotificationsPending * 15
	if incidentScore < 0 {
		incidentScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Incidents", Level: riskLevel(incidentScore), Score: incidentScore, Issues: incidentIssues})
	// Whistleblower compliance: penalize overdue acknowledgments/feedbacks
	whistleScore := 100
	whistleIssues := report.Whistleblower.OverdueAcknowledgments + report.Whistleblower.OverdueFeedbacks
	whistleScore -= report.Whistleblower.OverdueAcknowledgments * 20
	whistleScore -= report.Whistleblower.OverdueFeedbacks * 10
	if whistleScore < 0 {
		whistleScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Whistleblower", Level: riskLevel(whistleScore), Score: whistleScore, Issues: whistleIssues})
	// Academy compliance: score is simply the training completion rate
	academyScore := int(math.Round(report.Academy.CompletionRate))
	academyIssues := report.Academy.OverdueCount
	modules = append(modules, ModuleRisk{Module: "Academy", Level: riskLevel(academyScore), Score: academyScore, Issues: academyIssues})
	// Overall score is the average across modules
	totalScore := 0
	for _, m := range modules {
		totalScore += m.Score
	}
	if len(modules) > 0 {
		totalScore = totalScore / len(modules)
	}
	// Findings: sum of all module issues; critical findings are issues of
	// modules whose level is CRITICAL.
	totalFindings := 0
	criticalFindings := 0
	for _, m := range modules {
		totalFindings += m.Issues
		if m.Level == "CRITICAL" {
			criticalFindings += m.Issues
		}
	}
	return RiskOverview{
		OverallLevel:     riskLevel(totalScore),
		ModuleRisks:      modules,
		OpenFindings:     totalFindings,
		CriticalFindings: criticalFindings,
	}
}
// riskLevel maps a 0-100 score onto a categorical risk level:
// >= 75 is LOW, >= 50 MEDIUM, >= 25 HIGH, anything below (including
// negative values) CRITICAL.
func riskLevel(score int) string {
	if score >= 75 {
		return "LOW"
	}
	if score >= 50 {
		return "MEDIUM"
	}
	if score >= 25 {
		return "HIGH"
	}
	return "CRITICAL"
}
// calculateComplianceScore computes the 0-100 weighted overall compliance
// score from the module summaries: DSGVO weight 30, vendors 20,
// incidents 20, whistleblower 15, academy 15. Penalty-based module scores
// are clamped at 0 before weighting.
func (s *Store) calculateComplianceScore(report *ExecutiveReport) int {
	clamp := func(v int) int {
		if v < 0 {
			return 0
		}
		return v
	}
	// DSGVO: TOM completion percentage, or 0 when no TOMs exist at all.
	dsgvo := 0
	if report.DSGVO.TOMsTotal > 0 {
		dsgvo = report.DSGVO.CompletionPercent
	}
	// Vendors: penalties apply only when the tenant actually has vendors.
	vendor := 100
	if report.Vendors.TotalVendors > 0 {
		vendor -= report.Vendors.PendingReviews*10 + report.Vendors.ExpiredContracts*15
	}
	// Incidents: open incidents and pending notifications reduce the score.
	incident := clamp(100 - report.Incidents.OpenIncidents*10 - report.Incidents.NotificationsPending*20)
	// Whistleblower: overdue acknowledgments weigh more than overdue feedbacks.
	whistle := clamp(100 - report.Whistleblower.OverdueAcknowledgments*25 - report.Whistleblower.OverdueFeedbacks*15)
	// Academy: the rounded training completion rate.
	academy := int(math.Round(report.Academy.CompletionRate))
	weighted := [...]struct{ score, weight int }{
		{dsgvo, 30},
		{clamp(vendor), 20},
		{incident, 20},
		{whistle, 15},
		{academy, 15},
	}
	weightedSum, totalWeight := 0, 0
	for _, w := range weighted {
		weightedSum += w.score * w.weight
		totalWeight += w.weight
	}
	if totalWeight == 0 {
		return 0
	}
	return int(math.Round(float64(weightedSum) / float64(totalWeight)))
}
// deadlineSeverity classifies a deadline by days remaining: negative means
// OVERDUE, <= urgentDays means URGENT, <= warningDays means WARNING,
// otherwise INFO.
func deadlineSeverity(daysLeft, urgentDays, warningDays int) string {
	switch {
	case daysLeft < 0:
		return "OVERDUE"
	case daysLeft <= urgentDays:
		return "URGENT"
	case daysLeft <= warningDays:
		return "WARNING"
	default:
		return "INFO"
	}
}

// getUpcomingDeadlines collects upcoming vendor-review and DSR-response
// deadlines for the tenant, classifies their urgency, and returns at most
// 15 entries sorted by due date. Query failures are tolerated: a failing
// source simply contributes no entries (best-effort reporting).
func (s *Store) getUpcomingDeadlines(ctx context.Context, tenantID uuid.UUID) []Deadline {
	deadlines := []Deadline{}
	now := time.Now().UTC()
	// Vendor reviews due (urgent within 7 days, warning within 30)
	rows, err := s.pool.Query(ctx, `
	SELECT name, next_review_date FROM compliance.vendor_compliance
	WHERE tenant_id = $1 AND next_review_date IS NOT NULL
	ORDER BY next_review_date ASC LIMIT 10
	`, tenantID)
	if err == nil {
		defer rows.Close()
		for rows.Next() {
			var name string
			var dueDate time.Time
			if err := rows.Scan(&name, &dueDate); err != nil {
				continue
			}
			daysLeft := int(dueDate.Sub(now).Hours() / 24)
			deadlines = append(deadlines, Deadline{
				Module:      "Vendors",
				Type:        "REVIEW",
				Description: "Vendor-Review: " + name,
				DueDate:     dueDate,
				DaysLeft:    daysLeft,
				Severity:    deadlineSeverity(daysLeft, 7, 30),
			})
		}
	}
	// DSR response deadlines (tighter: urgent within 3 days, warning within 14)
	rows2, err := s.pool.Query(ctx, `
	SELECT request_type, deadline FROM compliance.dsr_requests
	WHERE tenant_id = $1 AND status NOT IN ('COMPLETED', 'REJECTED')
	AND deadline IS NOT NULL
	ORDER BY deadline ASC LIMIT 10
	`, tenantID)
	if err == nil {
		defer rows2.Close()
		for rows2.Next() {
			var reqType string
			var dueDate time.Time
			if err := rows2.Scan(&reqType, &dueDate); err != nil {
				continue
			}
			daysLeft := int(dueDate.Sub(now).Hours() / 24)
			deadlines = append(deadlines, Deadline{
				Module:      "DSR",
				Type:        "RESPONSE",
				Description: "Betroffenenrecht: " + reqType,
				DueDate:     dueDate,
				DaysLeft:    daysLeft,
				Severity:    deadlineSeverity(daysLeft, 3, 14),
			})
		}
	}
	// Merge both sources chronologically and cap the list at 15 entries.
	sort.Slice(deadlines, func(i, j int) bool {
		return deadlines[i].DueDate.Before(deadlines[j].DueDate)
	})
	if len(deadlines) > 15 {
		deadlines = deadlines[:15]
	}
	return deadlines
}
// getRecentActivity gathers the most recent activity entries (last 30 days)
// from the vendor, incident, and whistleblower tables, merges them in
// reverse chronological order, and caps the result at 20 entries. All
// queries are best-effort: errors are ignored and the source is skipped.
func (s *Store) getRecentActivity(ctx context.Context, tenantID uuid.UUID) []ActivityEntry {
	activities := []ActivityEntry{}
	// Recent vendors created/updated (query error ignored; rows is nil then)
	rows, _ := s.pool.Query(ctx, `
	SELECT name, created_at, 'CREATED' as action FROM compliance.vendor_compliance
	WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
	UNION ALL
	SELECT name, updated_at, 'UPDATED' FROM compliance.vendor_compliance
	WHERE tenant_id = $1 AND updated_at > created_at AND updated_at > NOW() - INTERVAL '30 days'
	ORDER BY 2 DESC LIMIT 5
	`, tenantID)
	if rows != nil {
		defer rows.Close()
		for rows.Next() {
			var name, action string
			var ts time.Time
			if err := rows.Scan(&name, &ts, &action); err != nil {
				continue
			}
			// Descriptions are intentionally German (user-facing text).
			desc := "Vendor "
			if action == "CREATED" {
				desc += "angelegt: "
			} else {
				desc += "aktualisiert: "
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Vendors",
				Action:      action,
				Description: desc + name,
			})
		}
	}
	// Recent incidents
	rows2, _ := s.pool.Query(ctx, `
	SELECT title, created_at, severity FROM compliance.incidents
	WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
	ORDER BY created_at DESC LIMIT 5
	`, tenantID)
	if rows2 != nil {
		defer rows2.Close()
		for rows2.Next() {
			var title, severity string
			var ts time.Time
			if err := rows2.Scan(&title, &ts, &severity); err != nil {
				continue
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Incidents",
				Action:      "CREATED",
				Description: "Datenpanne (" + severity + "): " + title,
			})
		}
	}
	// Recent whistleblower reports (admin view; only the category is
	// exposed, never report contents)
	rows3, _ := s.pool.Query(ctx, `
	SELECT category, created_at FROM whistleblower_reports
	WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
	ORDER BY created_at DESC LIMIT 5
	`, tenantID)
	if rows3 != nil {
		defer rows3.Close()
		for rows3.Next() {
			var category string
			var ts time.Time
			if err := rows3.Scan(&category, &ts); err != nil {
				continue
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Whistleblower",
				Action:      "REPORT",
				Description: "Neue Meldung: " + category,
			})
		}
	}
	// Sort by timestamp descending (most recent first)
	sort.Slice(activities, func(i, j int) bool {
		return activities[i].Timestamp.After(activities[j].Timestamp)
	})
	if len(activities) > 20 {
		activities = activities[:20]
	}
	return activities
}

View File

@@ -1,158 +0,0 @@
package sso
import (
"time"
"github.com/google/uuid"
)
// ============================================================================
// Constants / Enums
// ============================================================================
// ProviderType represents the SSO authentication protocol.
type ProviderType string

const (
	// ProviderTypeOIDC represents OpenID Connect authentication.
	ProviderTypeOIDC ProviderType = "oidc"
	// ProviderTypeSAML represents SAML 2.0 authentication.
	ProviderTypeSAML ProviderType = "saml"
)

// ============================================================================
// Main Entities
// ============================================================================
// SSOConfig represents a per-tenant SSO provider configuration supporting
// OIDC and SAML authentication protocols. Only the field group matching
// ProviderType is populated; the other protocol's fields stay empty.
type SSOConfig struct {
	ID           uuid.UUID    `json:"id" db:"id"`
	TenantID     uuid.UUID    `json:"tenant_id" db:"tenant_id"`
	ProviderType ProviderType `json:"provider_type" db:"provider_type"`
	Name         string       `json:"name" db:"name"`
	Enabled      bool         `json:"enabled" db:"enabled"`
	// OIDC settings
	// NOTE(review): OIDCClientSecret is serialized to JSON here — verify
	// that handlers redact it before returning configs to clients.
	OIDCIssuerURL    string   `json:"oidc_issuer_url,omitempty" db:"oidc_issuer_url"`
	OIDCClientID     string   `json:"oidc_client_id,omitempty" db:"oidc_client_id"`
	OIDCClientSecret string   `json:"oidc_client_secret,omitempty" db:"oidc_client_secret"`
	OIDCRedirectURI  string   `json:"oidc_redirect_uri,omitempty" db:"oidc_redirect_uri"`
	OIDCScopes       []string `json:"oidc_scopes,omitempty" db:"oidc_scopes"`
	// SAML settings (for future use)
	SAMLEntityID    string `json:"saml_entity_id,omitempty" db:"saml_entity_id"`
	SAMLSSOURL      string `json:"saml_sso_url,omitempty" db:"saml_sso_url"`
	SAMLCertificate string `json:"saml_certificate,omitempty" db:"saml_certificate"`
	SAMLACS_URL     string `json:"saml_acs_url,omitempty" db:"saml_acs_url"`
	// Role mapping: maps SSO group/role names to internal role IDs
	RoleMapping   map[string]string `json:"role_mapping" db:"role_mapping"`
	DefaultRoleID *uuid.UUID        `json:"default_role_id,omitempty" db:"default_role_id"`
	AutoProvision bool              `json:"auto_provision" db:"auto_provision"`
	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}

// SSOUser represents a JIT-provisioned user authenticated via an SSO provider.
type SSOUser struct {
	ID          uuid.UUID  `json:"id" db:"id"`
	TenantID    uuid.UUID  `json:"tenant_id" db:"tenant_id"`
	SSOConfigID uuid.UUID  `json:"sso_config_id" db:"sso_config_id"`
	ExternalID  string     `json:"external_id" db:"external_id"` // subject/ID at the identity provider
	Email       string     `json:"email" db:"email"`
	DisplayName string     `json:"display_name" db:"display_name"`
	Groups      []string   `json:"groups" db:"groups"` // group names as reported by the IdP
	LastLogin   *time.Time `json:"last_login,omitempty" db:"last_login"`
	IsActive    bool       `json:"is_active" db:"is_active"`
	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}

// ============================================================================
// API Request Types
// ============================================================================
// CreateSSOConfigRequest is the API request for creating an SSO configuration.
type CreateSSOConfigRequest struct {
	ProviderType     ProviderType      `json:"provider_type" binding:"required"`
	Name             string            `json:"name" binding:"required"`
	Enabled          bool              `json:"enabled"`
	OIDCIssuerURL    string            `json:"oidc_issuer_url"`
	OIDCClientID     string            `json:"oidc_client_id"`
	OIDCClientSecret string            `json:"oidc_client_secret"`
	OIDCRedirectURI  string            `json:"oidc_redirect_uri"`
	OIDCScopes       []string          `json:"oidc_scopes"`
	RoleMapping      map[string]string `json:"role_mapping"`
	DefaultRoleID    *uuid.UUID        `json:"default_role_id"`
	AutoProvision    bool              `json:"auto_provision"`
}

// UpdateSSOConfigRequest is the API request for partially updating an SSO
// configuration. Pointer fields allow distinguishing between "not provided"
// (nil) and "set to zero value".
type UpdateSSOConfigRequest struct {
	Name             *string           `json:"name"`
	Enabled          *bool             `json:"enabled"`
	OIDCIssuerURL    *string           `json:"oidc_issuer_url"`
	OIDCClientID     *string           `json:"oidc_client_id"`
	OIDCClientSecret *string           `json:"oidc_client_secret"`
	OIDCRedirectURI  *string           `json:"oidc_redirect_uri"`
	OIDCScopes       []string          `json:"oidc_scopes"`
	RoleMapping      map[string]string `json:"role_mapping"`
	DefaultRoleID    *uuid.UUID        `json:"default_role_id"`
	AutoProvision    *bool             `json:"auto_provision"`
}

// ============================================================================
// JWT / Session Types
// ============================================================================
// SSOClaims holds the claims embedded in JWT tokens issued after successful
// SSO authentication. These are used for downstream authorization decisions.
type SSOClaims struct {
	UserID      uuid.UUID `json:"user_id"`
	TenantID    uuid.UUID `json:"tenant_id"`
	Email       string    `json:"email"`
	DisplayName string    `json:"display_name"`
	Roles       []string  `json:"roles"`
	SSOConfigID uuid.UUID `json:"sso_config_id"`
}

// ============================================================================
// List / Filter Types
// ============================================================================
// SSOConfigFilters defines filters for listing SSO configurations.
type SSOConfigFilters struct {
	ProviderType ProviderType // empty string matches all provider types
	Enabled      *bool        // nil matches both enabled and disabled
	Search       string
	Limit        int
	Offset       int
}

// SSOUserFilters defines filters for listing SSO users.
type SSOUserFilters struct {
	SSOConfigID *uuid.UUID // nil matches users of any configuration
	Email       string
	IsActive    *bool
	Limit       int
	Offset      int
}

// SSOConfigListResponse is the API response for listing SSO configurations.
type SSOConfigListResponse struct {
	Configs []SSOConfig `json:"configs"`
	Total   int         `json:"total"`
}

// SSOUserListResponse is the API response for listing SSO users.
type SSOUserListResponse struct {
	Users []SSOUser `json:"users"`
	Total int       `json:"total"`
}

View File

@@ -1,477 +0,0 @@
package sso
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgxpool"
)
// Store handles SSO configuration and user data persistence.
type Store struct {
	pool *pgxpool.Pool // PostgreSQL connection pool; all queries go through it
}

// NewStore creates a new SSO store backed by the given connection pool.
func NewStore(pool *pgxpool.Pool) *Store {
	return &Store{pool: pool}
}
// ============================================================================
// SSO Configuration CRUD Operations
// ============================================================================
// CreateConfig creates a new SSO configuration for a tenant and returns the
// persisted record. A fresh UUID and creation timestamps are assigned here;
// missing OIDC scopes default to ["openid", "profile", "email"] and a nil
// role mapping becomes an empty map so downstream code never sees nil.
func (s *Store) CreateConfig(ctx context.Context, tenantID uuid.UUID, req *CreateSSOConfigRequest) (*SSOConfig, error) {
	now := time.Now().UTC()
	cfg := &SSOConfig{
		ID:               uuid.New(),
		TenantID:         tenantID,
		ProviderType:     req.ProviderType,
		Name:             req.Name,
		Enabled:          req.Enabled,
		OIDCIssuerURL:    req.OIDCIssuerURL,
		OIDCClientID:     req.OIDCClientID,
		OIDCClientSecret: req.OIDCClientSecret,
		OIDCRedirectURI:  req.OIDCRedirectURI,
		OIDCScopes:       req.OIDCScopes,
		RoleMapping:      req.RoleMapping,
		DefaultRoleID:    req.DefaultRoleID,
		AutoProvision:    req.AutoProvision,
		CreatedAt:        now,
		UpdatedAt:        now,
	}
	// Apply defaults
	if len(cfg.OIDCScopes) == 0 {
		cfg.OIDCScopes = []string{"openid", "profile", "email"}
	}
	if cfg.RoleMapping == nil {
		cfg.RoleMapping = map[string]string{}
	}
	// role_mapping is stored as a JSON column.
	roleMappingJSON, err := json.Marshal(cfg.RoleMapping)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal role_mapping: %w", err)
	}
	// SAML fields are inserted as their zero values; creation requests
	// currently only carry OIDC settings.
	_, err = s.pool.Exec(ctx, `
	INSERT INTO sso_configurations (
	id, tenant_id, provider_type, name, enabled,
	oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
	saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
	role_mapping, default_role_id, auto_provision,
	created_at, updated_at
	) VALUES (
	$1, $2, $3, $4, $5,
	$6, $7, $8, $9, $10,
	$11, $12, $13, $14,
	$15, $16, $17,
	$18, $19
	)
	`,
		cfg.ID, cfg.TenantID, string(cfg.ProviderType), cfg.Name, cfg.Enabled,
		cfg.OIDCIssuerURL, cfg.OIDCClientID, cfg.OIDCClientSecret, cfg.OIDCRedirectURI, cfg.OIDCScopes,
		cfg.SAMLEntityID, cfg.SAMLSSOURL, cfg.SAMLCertificate, cfg.SAMLACS_URL,
		roleMappingJSON, cfg.DefaultRoleID, cfg.AutoProvision,
		cfg.CreatedAt, cfg.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to insert sso configuration: %w", err)
	}
	return cfg, nil
}
// GetConfig retrieves an SSO configuration by ID and tenant. It returns
// (nil, nil) when no configuration matches, so callers can distinguish
// "not found" from a database failure.
func (s *Store) GetConfig(ctx context.Context, tenantID, configID uuid.UUID) (*SSOConfig, error) {
	var cfg SSOConfig
	var providerType string
	var roleMappingJSON []byte
	err := s.pool.QueryRow(ctx, `
	SELECT
	id, tenant_id, provider_type, name, enabled,
	oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
	saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
	role_mapping, default_role_id, auto_provision,
	created_at, updated_at
	FROM sso_configurations
	WHERE id = $1 AND tenant_id = $2
	`, configID, tenantID).Scan(
		&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
		&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
		&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
		&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
		&cfg.CreatedAt, &cfg.UpdatedAt,
	)
	// errors.Is matches wrapped errors too; a plain == comparison can miss
	// pgx.ErrNoRows when it is wrapped by intermediate layers.
	if errors.Is(err, pgx.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, fmt.Errorf("failed to get sso configuration: %w", err)
	}
	cfg.ProviderType = ProviderType(providerType)
	cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
	return &cfg, nil
}
// GetConfigByName retrieves an SSO configuration by name and tenant. It
// returns (nil, nil) when no configuration matches, so callers can
// distinguish "not found" from a database failure.
func (s *Store) GetConfigByName(ctx context.Context, tenantID uuid.UUID, name string) (*SSOConfig, error) {
	var cfg SSOConfig
	var providerType string
	var roleMappingJSON []byte
	err := s.pool.QueryRow(ctx, `
	SELECT
	id, tenant_id, provider_type, name, enabled,
	oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
	saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
	role_mapping, default_role_id, auto_provision,
	created_at, updated_at
	FROM sso_configurations
	WHERE tenant_id = $1 AND name = $2
	`, tenantID, name).Scan(
		&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
		&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
		&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
		&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
		&cfg.CreatedAt, &cfg.UpdatedAt,
	)
	// errors.Is matches wrapped errors too; a plain == comparison can miss
	// pgx.ErrNoRows when it is wrapped by intermediate layers.
	if errors.Is(err, pgx.ErrNoRows) {
		return nil, nil
	}
	if err != nil {
		return nil, fmt.Errorf("failed to get sso configuration by name: %w", err)
	}
	cfg.ProviderType = ProviderType(providerType)
	cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
	return &cfg, nil
}
// ListConfigs lists all SSO configurations for a tenant, ordered by name.
// Returns a nil slice (not an error) when the tenant has no configurations.
func (s *Store) ListConfigs(ctx context.Context, tenantID uuid.UUID) ([]SSOConfig, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT
			id, tenant_id, provider_type, name, enabled,
			oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
			saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
			role_mapping, default_role_id, auto_provision,
			created_at, updated_at
		FROM sso_configurations
		WHERE tenant_id = $1
		ORDER BY name ASC
	`, tenantID)
	if err != nil {
		return nil, fmt.Errorf("failed to list sso configurations: %w", err)
	}
	defer rows.Close()
	var configs []SSOConfig
	for rows.Next() {
		cfg, err := scanSSOConfig(rows)
		if err != nil {
			return nil, err
		}
		configs = append(configs, *cfg)
	}
	// rows.Next() returning false can mask a connection or decoding failure;
	// rows.Err() must be checked so a truncated result set is not mistaken
	// for a complete one.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate sso configurations: %w", err)
	}
	return configs, nil
}
// UpdateConfig applies a partial update to an existing SSO configuration.
// Only fields set (non-nil) in req are changed; all other columns — including
// the SAML fields, for which the request type carries no overrides — are
// written back with their existing values.
func (s *Store) UpdateConfig(ctx context.Context, tenantID, configID uuid.UUID, req *UpdateSSOConfigRequest) (*SSOConfig, error) {
	conf, err := s.GetConfig(ctx, tenantID, configID)
	if err != nil {
		return nil, err
	}
	if conf == nil {
		return nil, fmt.Errorf("sso configuration not found")
	}
	// Merge only the fields the caller explicitly supplied.
	if req.Name != nil {
		conf.Name = *req.Name
	}
	if req.Enabled != nil {
		conf.Enabled = *req.Enabled
	}
	if req.OIDCIssuerURL != nil {
		conf.OIDCIssuerURL = *req.OIDCIssuerURL
	}
	if req.OIDCClientID != nil {
		conf.OIDCClientID = *req.OIDCClientID
	}
	if req.OIDCClientSecret != nil {
		conf.OIDCClientSecret = *req.OIDCClientSecret
	}
	if req.OIDCRedirectURI != nil {
		conf.OIDCRedirectURI = *req.OIDCRedirectURI
	}
	if req.OIDCScopes != nil {
		conf.OIDCScopes = req.OIDCScopes
	}
	if req.RoleMapping != nil {
		conf.RoleMapping = req.RoleMapping
	}
	if req.DefaultRoleID != nil {
		conf.DefaultRoleID = req.DefaultRoleID
	}
	if req.AutoProvision != nil {
		conf.AutoProvision = *req.AutoProvision
	}
	conf.UpdatedAt = time.Now().UTC()
	// role_mapping is persisted as JSONB; marshal the merged map.
	roleMappingJSON, err := json.Marshal(conf.RoleMapping)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal role_mapping: %w", err)
	}
	const query = `
		UPDATE sso_configurations SET
			name = $3, enabled = $4,
			oidc_issuer_url = $5, oidc_client_id = $6, oidc_client_secret = $7,
			oidc_redirect_uri = $8, oidc_scopes = $9,
			saml_entity_id = $10, saml_sso_url = $11, saml_certificate = $12, saml_acs_url = $13,
			role_mapping = $14, default_role_id = $15, auto_provision = $16,
			updated_at = $17
		WHERE id = $1 AND tenant_id = $2
	`
	if _, err := s.pool.Exec(ctx, query,
		conf.ID, conf.TenantID,
		conf.Name, conf.Enabled,
		conf.OIDCIssuerURL, conf.OIDCClientID, conf.OIDCClientSecret,
		conf.OIDCRedirectURI, conf.OIDCScopes,
		conf.SAMLEntityID, conf.SAMLSSOURL, conf.SAMLCertificate, conf.SAMLACS_URL,
		roleMappingJSON, conf.DefaultRoleID, conf.AutoProvision,
		conf.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("failed to update sso configuration: %w", err)
	}
	return conf, nil
}
// DeleteConfig deletes an SSO configuration by ID and tenant.
// Deleting a configuration that does not exist is not an error.
func (s *Store) DeleteConfig(ctx context.Context, tenantID, configID uuid.UUID) error {
	const query = "DELETE FROM sso_configurations WHERE id = $1 AND tenant_id = $2"
	if _, err := s.pool.Exec(ctx, query, configID, tenantID); err != nil {
		return fmt.Errorf("failed to delete sso configuration: %w", err)
	}
	return nil
}
// GetEnabledConfig retrieves the active/enabled SSO configuration for a tenant.
// Returns (nil, nil) when the tenant has no enabled configuration.
//
// NOTE(review): the query uses LIMIT 1 without an ORDER BY, so the choice is
// nondeterministic if multiple configurations are enabled — confirm whether a
// uniqueness constraint guarantees at most one enabled config per tenant.
func (s *Store) GetEnabledConfig(ctx context.Context, tenantID uuid.UUID) (*SSOConfig, error) {
	const query = `
		SELECT
			id, tenant_id, provider_type, name, enabled,
			oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
			saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
			role_mapping, default_role_id, auto_provision,
			created_at, updated_at
		FROM sso_configurations
		WHERE tenant_id = $1 AND enabled = true
		LIMIT 1
	`
	var (
		cfg             SSOConfig
		providerType    string
		roleMappingJSON []byte
	)
	row := s.pool.QueryRow(ctx, query, tenantID)
	if err := row.Scan(
		&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
		&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
		&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
		&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
		&cfg.CreatedAt, &cfg.UpdatedAt,
	); err != nil {
		if err == pgx.ErrNoRows {
			// No enabled configuration is a normal state, not an error.
			return nil, nil
		}
		return nil, fmt.Errorf("failed to get enabled sso configuration: %w", err)
	}
	cfg.ProviderType = ProviderType(providerType)
	cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
	return &cfg, nil
}
// ============================================================================
// SSO User Operations
// ============================================================================
// UpsertUser inserts or updates an SSO user via JIT provisioning.
// On conflict (tenant_id, sso_config_id, external_id) the user's email,
// display name, groups, and last-login timestamp are refreshed, and the
// account is reactivated.
func (s *Store) UpsertUser(ctx context.Context, tenantID, ssoConfigID uuid.UUID, externalID, email, displayName string, groups []string) (*SSOUser, error) {
	// A single timestamp ($8) is used for last_login, created_at, and
	// updated_at on insert; on conflict it becomes the new last_login and
	// updated_at while created_at is preserved.
	now := time.Now().UTC()
	newID := uuid.New()
	const query = `
		INSERT INTO sso_users (
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
		) VALUES (
			$1, $2, $3,
			$4, $5, $6, $7,
			$8, true,
			$8, $8
		)
		ON CONFLICT (tenant_id, sso_config_id, external_id) DO UPDATE SET
			email = EXCLUDED.email,
			display_name = EXCLUDED.display_name,
			groups = EXCLUDED.groups,
			last_login = EXCLUDED.last_login,
			is_active = true,
			updated_at = EXCLUDED.updated_at
		RETURNING
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
	`
	var u SSOUser
	row := s.pool.QueryRow(ctx, query,
		newID, tenantID, ssoConfigID,
		externalID, email, displayName, groups,
		now,
	)
	if err := row.Scan(
		&u.ID, &u.TenantID, &u.SSOConfigID,
		&u.ExternalID, &u.Email, &u.DisplayName, &u.Groups,
		&u.LastLogin, &u.IsActive,
		&u.CreatedAt, &u.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("failed to upsert sso user: %w", err)
	}
	return &u, nil
}
// GetUserByExternalID looks up an SSO user by their external identity
// provider ID. Returns (nil, nil) when the user has not been provisioned.
func (s *Store) GetUserByExternalID(ctx context.Context, tenantID, ssoConfigID uuid.UUID, externalID string) (*SSOUser, error) {
	const query = `
		SELECT
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
		FROM sso_users
		WHERE tenant_id = $1 AND sso_config_id = $2 AND external_id = $3
	`
	var u SSOUser
	row := s.pool.QueryRow(ctx, query, tenantID, ssoConfigID, externalID)
	if err := row.Scan(
		&u.ID, &u.TenantID, &u.SSOConfigID,
		&u.ExternalID, &u.Email, &u.DisplayName, &u.Groups,
		&u.LastLogin, &u.IsActive,
		&u.CreatedAt, &u.UpdatedAt,
	); err != nil {
		if err == pgx.ErrNoRows {
			// An unknown external ID is an expected outcome for JIT flows.
			return nil, nil
		}
		return nil, fmt.Errorf("failed to get sso user by external id: %w", err)
	}
	return &u, nil
}
// ListUsers lists all SSO-provisioned users for a tenant, ordered by
// display name. Returns a nil slice (not an error) when there are none.
func (s *Store) ListUsers(ctx context.Context, tenantID uuid.UUID) ([]SSOUser, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
		FROM sso_users
		WHERE tenant_id = $1
		ORDER BY display_name ASC
	`, tenantID)
	if err != nil {
		return nil, fmt.Errorf("failed to list sso users: %w", err)
	}
	defer rows.Close()
	var users []SSOUser
	for rows.Next() {
		user, err := scanSSOUser(rows)
		if err != nil {
			return nil, err
		}
		users = append(users, *user)
	}
	// rows.Next() returning false can mask a connection or decoding failure;
	// check rows.Err() so a truncated result set is not silently returned.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("failed to iterate sso users: %w", err)
	}
	return users, nil
}
// ============================================================================
// Row Scanning Helpers
// ============================================================================
// scanSSOConfig scans one SSO configuration row from pgx.Rows, converting
// the provider_type text column and role_mapping JSONB column into their
// typed Go representations.
func scanSSOConfig(rows pgx.Rows) (*SSOConfig, error) {
	var (
		cfg             SSOConfig
		providerType    string
		roleMappingJSON []byte
	)
	if err := rows.Scan(
		&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
		&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
		&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
		&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
		&cfg.CreatedAt, &cfg.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("failed to scan sso configuration: %w", err)
	}
	cfg.ProviderType = ProviderType(providerType)
	cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
	return &cfg, nil
}
// scanSSOUser scans one SSO user row from pgx.Rows into an SSOUser value.
func scanSSOUser(rows pgx.Rows) (*SSOUser, error) {
	var u SSOUser
	if err := rows.Scan(
		&u.ID, &u.TenantID, &u.SSOConfigID,
		&u.ExternalID, &u.Email, &u.DisplayName, &u.Groups,
		&u.LastLogin, &u.IsActive,
		&u.CreatedAt, &u.UpdatedAt,
	); err != nil {
		return nil, fmt.Errorf("failed to scan sso user: %w", err)
	}
	return &u, nil
}
// unmarshalRoleMapping safely unmarshals JSONB role_mapping bytes into a map.
// Missing, empty, malformed, or JSON-null input all yield an empty (non-nil)
// map, so callers never need to guard against a nil RoleMapping.
func unmarshalRoleMapping(data []byte) map[string]string {
	// len() covers both nil and zero-length slices (the original nil check
	// let an empty non-nil slice fall through to json.Unmarshal).
	if len(data) == 0 {
		return map[string]string{}
	}
	var m map[string]string
	// JSON "null" unmarshals without error but leaves m nil; normalize that
	// to an empty map as well to honor the non-nil contract.
	if err := json.Unmarshal(data, &m); err != nil || m == nil {
		return map[string]string{}
	}
	return m
}