feat: Add Academy, Whistleblower, Incidents, Vendor, DSB, SSO, Reporting, Multi-Tenant and Industry backends
Go handlers, models, stores and migrations for all SDK modules. Updates developer portal navigation and BYOEH page. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -15,8 +15,16 @@ import (
|
|||||||
"github.com/breakpilot/ai-compliance-sdk/internal/dsgvo"
|
"github.com/breakpilot/ai-compliance-sdk/internal/dsgvo"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/llm"
|
"github.com/breakpilot/ai-compliance-sdk/internal/llm"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/academy"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/incidents"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/roadmap"
|
"github.com/breakpilot/ai-compliance-sdk/internal/roadmap"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/ucca"
|
"github.com/breakpilot/ai-compliance-sdk/internal/ucca"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/whistleblower"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/dsb"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/multitenant"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/sso"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/vendor"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/workshop"
|
"github.com/breakpilot/ai-compliance-sdk/internal/workshop"
|
||||||
"github.com/breakpilot/ai-compliance-sdk/internal/portfolio"
|
"github.com/breakpilot/ai-compliance-sdk/internal/portfolio"
|
||||||
"github.com/gin-contrib/cors"
|
"github.com/gin-contrib/cors"
|
||||||
@@ -59,6 +67,14 @@ func main() {
|
|||||||
roadmapStore := roadmap.NewStore(pool)
|
roadmapStore := roadmap.NewStore(pool)
|
||||||
workshopStore := workshop.NewStore(pool)
|
workshopStore := workshop.NewStore(pool)
|
||||||
portfolioStore := portfolio.NewStore(pool)
|
portfolioStore := portfolio.NewStore(pool)
|
||||||
|
academyStore := academy.NewStore(pool)
|
||||||
|
whistleblowerStore := whistleblower.NewStore(pool)
|
||||||
|
incidentStore := incidents.NewStore(pool)
|
||||||
|
vendorStore := vendor.NewStore(pool)
|
||||||
|
reportingStore := reporting.NewStore(pool, dsgvoStore, vendorStore, incidentStore, whistleblowerStore, academyStore)
|
||||||
|
ssoStore := sso.NewStore(pool)
|
||||||
|
multitenantStore := multitenant.NewStore(pool, rbacStore, reportingStore)
|
||||||
|
dsbStore := dsb.NewStore(pool, reportingStore)
|
||||||
|
|
||||||
// Initialize services
|
// Initialize services
|
||||||
rbacService := rbac.NewService(rbacStore)
|
rbacService := rbac.NewService(rbacStore)
|
||||||
@@ -98,6 +114,15 @@ func main() {
|
|||||||
workshopHandlers := handlers.NewWorkshopHandlers(workshopStore)
|
workshopHandlers := handlers.NewWorkshopHandlers(workshopStore)
|
||||||
portfolioHandlers := handlers.NewPortfolioHandlers(portfolioStore)
|
portfolioHandlers := handlers.NewPortfolioHandlers(portfolioStore)
|
||||||
draftingHandlers := handlers.NewDraftingHandlers(accessGate, providerRegistry, piiDetector, auditStore, trailBuilder)
|
draftingHandlers := handlers.NewDraftingHandlers(accessGate, providerRegistry, piiDetector, auditStore, trailBuilder)
|
||||||
|
academyHandlers := handlers.NewAcademyHandlers(academyStore)
|
||||||
|
whistleblowerHandlers := handlers.NewWhistleblowerHandlers(whistleblowerStore)
|
||||||
|
incidentHandlers := handlers.NewIncidentHandlers(incidentStore)
|
||||||
|
vendorHandlers := handlers.NewVendorHandlers(vendorStore)
|
||||||
|
reportingHandlers := handlers.NewReportingHandlers(reportingStore)
|
||||||
|
ssoHandlers := handlers.NewSSOHandlers(ssoStore, cfg.JWTSecret)
|
||||||
|
multitenantHandlers := handlers.NewMultiTenantHandlers(multitenantStore, rbacStore)
|
||||||
|
industryHandlers := handlers.NewIndustryHandlers()
|
||||||
|
dsbHandlers := handlers.NewDSBHandlers(dsbStore)
|
||||||
|
|
||||||
// Initialize middleware
|
// Initialize middleware
|
||||||
rbacMiddleware := rbac.NewMiddleware(rbacService, policyEngine)
|
rbacMiddleware := rbac.NewMiddleware(rbacService, policyEngine)
|
||||||
@@ -435,6 +460,197 @@ func main() {
|
|||||||
draftingRoutes.POST("/validate", draftingHandlers.ValidateDocument)
|
draftingRoutes.POST("/validate", draftingHandlers.ValidateDocument)
|
||||||
draftingRoutes.GET("/history", draftingHandlers.GetDraftHistory)
|
draftingRoutes.GET("/history", draftingHandlers.GetDraftHistory)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Academy routes - E-Learning / Compliance Training
|
||||||
|
academyRoutes := v1.Group("/academy")
|
||||||
|
{
|
||||||
|
// Courses
|
||||||
|
academyRoutes.POST("/courses", academyHandlers.CreateCourse)
|
||||||
|
academyRoutes.GET("/courses", academyHandlers.ListCourses)
|
||||||
|
academyRoutes.GET("/courses/:id", academyHandlers.GetCourse)
|
||||||
|
academyRoutes.PUT("/courses/:id", academyHandlers.UpdateCourse)
|
||||||
|
academyRoutes.DELETE("/courses/:id", academyHandlers.DeleteCourse)
|
||||||
|
|
||||||
|
// Enrollments
|
||||||
|
academyRoutes.POST("/enrollments", academyHandlers.CreateEnrollment)
|
||||||
|
academyRoutes.GET("/enrollments", academyHandlers.ListEnrollments)
|
||||||
|
academyRoutes.PUT("/enrollments/:id/progress", academyHandlers.UpdateProgress)
|
||||||
|
academyRoutes.POST("/enrollments/:id/complete", academyHandlers.CompleteEnrollment)
|
||||||
|
|
||||||
|
// Certificates
|
||||||
|
academyRoutes.GET("/certificates/:id", academyHandlers.GetCertificate)
|
||||||
|
academyRoutes.POST("/enrollments/:id/certificate", academyHandlers.GenerateCertificate)
|
||||||
|
|
||||||
|
// Quiz
|
||||||
|
academyRoutes.POST("/courses/:id/quiz", academyHandlers.SubmitQuiz)
|
||||||
|
|
||||||
|
// Statistics
|
||||||
|
academyRoutes.GET("/stats", academyHandlers.GetStatistics)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Whistleblower routes - Hinweisgebersystem (HinSchG)
|
||||||
|
whistleblowerRoutes := v1.Group("/whistleblower")
|
||||||
|
{
|
||||||
|
// Public endpoints (anonymous reporting)
|
||||||
|
whistleblowerRoutes.POST("/reports/submit", whistleblowerHandlers.SubmitReport)
|
||||||
|
whistleblowerRoutes.GET("/reports/access/:accessKey", whistleblowerHandlers.GetReportByAccessKey)
|
||||||
|
whistleblowerRoutes.POST("/reports/access/:accessKey/messages", whistleblowerHandlers.SendPublicMessage)
|
||||||
|
|
||||||
|
// Admin endpoints
|
||||||
|
whistleblowerRoutes.GET("/reports", whistleblowerHandlers.ListReports)
|
||||||
|
whistleblowerRoutes.GET("/reports/:id", whistleblowerHandlers.GetReport)
|
||||||
|
whistleblowerRoutes.PUT("/reports/:id", whistleblowerHandlers.UpdateReport)
|
||||||
|
whistleblowerRoutes.DELETE("/reports/:id", whistleblowerHandlers.DeleteReport)
|
||||||
|
whistleblowerRoutes.POST("/reports/:id/acknowledge", whistleblowerHandlers.AcknowledgeReport)
|
||||||
|
whistleblowerRoutes.POST("/reports/:id/investigate", whistleblowerHandlers.StartInvestigation)
|
||||||
|
whistleblowerRoutes.POST("/reports/:id/measures", whistleblowerHandlers.AddMeasure)
|
||||||
|
whistleblowerRoutes.POST("/reports/:id/close", whistleblowerHandlers.CloseReport)
|
||||||
|
whistleblowerRoutes.POST("/reports/:id/messages", whistleblowerHandlers.SendAdminMessage)
|
||||||
|
whistleblowerRoutes.GET("/reports/:id/messages", whistleblowerHandlers.ListMessages)
|
||||||
|
|
||||||
|
// Statistics
|
||||||
|
whistleblowerRoutes.GET("/stats", whistleblowerHandlers.GetStatistics)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Incidents routes - Datenpannen-Management (DSGVO Art. 33/34)
|
||||||
|
incidentRoutes := v1.Group("/incidents")
|
||||||
|
{
|
||||||
|
// Incident CRUD
|
||||||
|
incidentRoutes.POST("", incidentHandlers.CreateIncident)
|
||||||
|
incidentRoutes.GET("", incidentHandlers.ListIncidents)
|
||||||
|
incidentRoutes.GET("/:id", incidentHandlers.GetIncident)
|
||||||
|
incidentRoutes.PUT("/:id", incidentHandlers.UpdateIncident)
|
||||||
|
incidentRoutes.DELETE("/:id", incidentHandlers.DeleteIncident)
|
||||||
|
|
||||||
|
// Risk Assessment
|
||||||
|
incidentRoutes.POST("/:id/assess-risk", incidentHandlers.AssessRisk)
|
||||||
|
|
||||||
|
// Authority Notification (Art. 33)
|
||||||
|
incidentRoutes.POST("/:id/notify-authority", incidentHandlers.SubmitAuthorityNotification)
|
||||||
|
|
||||||
|
// Data Subject Notification (Art. 34)
|
||||||
|
incidentRoutes.POST("/:id/notify-subjects", incidentHandlers.NotifyDataSubjects)
|
||||||
|
|
||||||
|
// Measures
|
||||||
|
incidentRoutes.POST("/:id/measures", incidentHandlers.AddMeasure)
|
||||||
|
incidentRoutes.PUT("/:id/measures/:measureId", incidentHandlers.UpdateMeasure)
|
||||||
|
incidentRoutes.POST("/:id/measures/:measureId/complete", incidentHandlers.CompleteMeasure)
|
||||||
|
|
||||||
|
// Timeline
|
||||||
|
incidentRoutes.POST("/:id/timeline", incidentHandlers.AddTimelineEntry)
|
||||||
|
|
||||||
|
// Lifecycle
|
||||||
|
incidentRoutes.POST("/:id/close", incidentHandlers.CloseIncident)
|
||||||
|
|
||||||
|
// Statistics
|
||||||
|
incidentRoutes.GET("/stats", incidentHandlers.GetStatistics)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Vendor Compliance routes - Vendor Management & AVV/DPA (DSGVO Art. 28)
|
||||||
|
vendorRoutes := v1.Group("/vendors")
|
||||||
|
{
|
||||||
|
// Vendor CRUD
|
||||||
|
vendorRoutes.POST("", vendorHandlers.CreateVendor)
|
||||||
|
vendorRoutes.GET("", vendorHandlers.ListVendors)
|
||||||
|
vendorRoutes.GET("/:id", vendorHandlers.GetVendor)
|
||||||
|
vendorRoutes.PUT("/:id", vendorHandlers.UpdateVendor)
|
||||||
|
vendorRoutes.DELETE("/:id", vendorHandlers.DeleteVendor)
|
||||||
|
|
||||||
|
// Contracts (AVV/DPA)
|
||||||
|
vendorRoutes.POST("/contracts", vendorHandlers.CreateContract)
|
||||||
|
vendorRoutes.GET("/contracts", vendorHandlers.ListContracts)
|
||||||
|
vendorRoutes.GET("/contracts/:id", vendorHandlers.GetContract)
|
||||||
|
vendorRoutes.PUT("/contracts/:id", vendorHandlers.UpdateContract)
|
||||||
|
vendorRoutes.DELETE("/contracts/:id", vendorHandlers.DeleteContract)
|
||||||
|
|
||||||
|
// Findings
|
||||||
|
vendorRoutes.POST("/findings", vendorHandlers.CreateFinding)
|
||||||
|
vendorRoutes.GET("/findings", vendorHandlers.ListFindings)
|
||||||
|
vendorRoutes.GET("/findings/:id", vendorHandlers.GetFinding)
|
||||||
|
vendorRoutes.PUT("/findings/:id", vendorHandlers.UpdateFinding)
|
||||||
|
vendorRoutes.POST("/findings/:id/resolve", vendorHandlers.ResolveFinding)
|
||||||
|
|
||||||
|
// Control Instances
|
||||||
|
vendorRoutes.POST("/controls", vendorHandlers.UpsertControlInstance)
|
||||||
|
vendorRoutes.GET("/controls", vendorHandlers.ListControlInstances)
|
||||||
|
|
||||||
|
// Templates
|
||||||
|
vendorRoutes.GET("/templates", vendorHandlers.ListTemplates)
|
||||||
|
vendorRoutes.GET("/templates/:templateId", vendorHandlers.GetTemplate)
|
||||||
|
vendorRoutes.POST("/templates", vendorHandlers.CreateTemplate)
|
||||||
|
vendorRoutes.POST("/templates/:templateId/apply", vendorHandlers.ApplyTemplate)
|
||||||
|
|
||||||
|
// Statistics
|
||||||
|
vendorRoutes.GET("/stats", vendorHandlers.GetStatistics)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reporting routes - Executive Compliance Reporting Dashboard
|
||||||
|
reportingRoutes := v1.Group("/reporting")
|
||||||
|
{
|
||||||
|
reportingRoutes.GET("/executive", reportingHandlers.GetExecutiveReport)
|
||||||
|
reportingRoutes.GET("/score", reportingHandlers.GetComplianceScore)
|
||||||
|
reportingRoutes.GET("/deadlines", reportingHandlers.GetUpcomingDeadlines)
|
||||||
|
reportingRoutes.GET("/risks", reportingHandlers.GetRiskOverview)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SSO routes - Single Sign-On (SAML/OIDC)
|
||||||
|
ssoRoutes := v1.Group("/sso")
|
||||||
|
{
|
||||||
|
// Config CRUD
|
||||||
|
ssoRoutes.POST("/configs", ssoHandlers.CreateConfig)
|
||||||
|
ssoRoutes.GET("/configs", ssoHandlers.ListConfigs)
|
||||||
|
ssoRoutes.GET("/configs/:id", ssoHandlers.GetConfig)
|
||||||
|
ssoRoutes.PUT("/configs/:id", ssoHandlers.UpdateConfig)
|
||||||
|
ssoRoutes.DELETE("/configs/:id", ssoHandlers.DeleteConfig)
|
||||||
|
|
||||||
|
// SSO Users
|
||||||
|
ssoRoutes.GET("/users", ssoHandlers.ListUsers)
|
||||||
|
|
||||||
|
// OIDC Flow
|
||||||
|
ssoRoutes.GET("/oidc/login", ssoHandlers.InitiateOIDCLogin)
|
||||||
|
ssoRoutes.GET("/oidc/callback", ssoHandlers.HandleOIDCCallback)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Multi-Tenant Administration routes
|
||||||
|
mtRoutes := v1.Group("/multi-tenant")
|
||||||
|
{
|
||||||
|
mtRoutes.GET("/overview", multitenantHandlers.GetOverview)
|
||||||
|
mtRoutes.POST("/tenants", multitenantHandlers.CreateTenant)
|
||||||
|
mtRoutes.GET("/tenants/:id", multitenantHandlers.GetTenantDetail)
|
||||||
|
mtRoutes.PUT("/tenants/:id", multitenantHandlers.UpdateTenant)
|
||||||
|
mtRoutes.GET("/tenants/:id/namespaces", multitenantHandlers.ListNamespaces)
|
||||||
|
mtRoutes.POST("/tenants/:id/namespaces", multitenantHandlers.CreateNamespace)
|
||||||
|
mtRoutes.POST("/switch", multitenantHandlers.SwitchTenant)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Industry-Specific Templates routes (Phase 3.3)
|
||||||
|
industryRoutes := v1.Group("/industry/templates")
|
||||||
|
{
|
||||||
|
industryRoutes.GET("", industryHandlers.ListIndustries)
|
||||||
|
industryRoutes.GET("/:slug", industryHandlers.GetIndustry)
|
||||||
|
industryRoutes.GET("/:slug/vvt", industryHandlers.GetVVTTemplates)
|
||||||
|
industryRoutes.GET("/:slug/tom", industryHandlers.GetTOMRecommendations)
|
||||||
|
industryRoutes.GET("/:slug/risks", industryHandlers.GetRiskScenarios)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DSB-as-a-Service Portal routes (Phase 3.4)
|
||||||
|
dsbRoutes := v1.Group("/dsb")
|
||||||
|
{
|
||||||
|
dsbRoutes.GET("/dashboard", dsbHandlers.GetDashboard)
|
||||||
|
dsbRoutes.POST("/assignments", dsbHandlers.CreateAssignment)
|
||||||
|
dsbRoutes.GET("/assignments", dsbHandlers.ListAssignments)
|
||||||
|
dsbRoutes.GET("/assignments/:id", dsbHandlers.GetAssignment)
|
||||||
|
dsbRoutes.PUT("/assignments/:id", dsbHandlers.UpdateAssignment)
|
||||||
|
dsbRoutes.POST("/assignments/:id/hours", dsbHandlers.CreateHourEntry)
|
||||||
|
dsbRoutes.GET("/assignments/:id/hours", dsbHandlers.ListHours)
|
||||||
|
dsbRoutes.GET("/assignments/:id/hours/summary", dsbHandlers.GetHoursSummary)
|
||||||
|
dsbRoutes.POST("/assignments/:id/tasks", dsbHandlers.CreateTask)
|
||||||
|
dsbRoutes.GET("/assignments/:id/tasks", dsbHandlers.ListTasks)
|
||||||
|
dsbRoutes.PUT("/tasks/:taskId", dsbHandlers.UpdateTask)
|
||||||
|
dsbRoutes.POST("/tasks/:taskId/complete", dsbHandlers.CompleteTask)
|
||||||
|
dsbRoutes.POST("/assignments/:id/communications", dsbHandlers.CreateCommunication)
|
||||||
|
dsbRoutes.GET("/assignments/:id/communications", dsbHandlers.ListCommunications)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create HTTP server
|
// Create HTTP server
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ require (
|
|||||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
github.com/go-playground/validator/v10 v10.26.0 // indirect
|
||||||
github.com/goccy/go-json v0.10.5 // indirect
|
github.com/goccy/go-json v0.10.5 // indirect
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||||
github.com/hhrutter/lzw v1.0.0 // indirect
|
github.com/hhrutter/lzw v1.0.0 // indirect
|
||||||
github.com/hhrutter/tiff v1.0.1 // indirect
|
github.com/hhrutter/tiff v1.0.1 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
|
|||||||
@@ -33,6 +33,8 @@ github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc
|
|||||||
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
|
||||||
|
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
|
|||||||
226
ai-compliance-sdk/internal/academy/models.go
Normal file
226
ai-compliance-sdk/internal/academy/models.go
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
package academy
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Constants / Enums
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CourseCategory represents the category of a compliance course
|
||||||
|
type CourseCategory string
|
||||||
|
|
||||||
|
const (
|
||||||
|
CourseCategoryDSGVOBasics CourseCategory = "dsgvo_basics"
|
||||||
|
CourseCategoryITSecurity CourseCategory = "it_security"
|
||||||
|
CourseCategoryAILiteracy CourseCategory = "ai_literacy"
|
||||||
|
CourseCategoryWhistleblowerProtection CourseCategory = "whistleblower_protection"
|
||||||
|
CourseCategoryCustom CourseCategory = "custom"
|
||||||
|
)
|
||||||
|
|
||||||
|
// EnrollmentStatus represents the status of an enrollment
|
||||||
|
type EnrollmentStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
EnrollmentStatusNotStarted EnrollmentStatus = "not_started"
|
||||||
|
EnrollmentStatusInProgress EnrollmentStatus = "in_progress"
|
||||||
|
EnrollmentStatusCompleted EnrollmentStatus = "completed"
|
||||||
|
EnrollmentStatusExpired EnrollmentStatus = "expired"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LessonType represents the type of a lesson
|
||||||
|
type LessonType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
LessonTypeVideo LessonType = "video"
|
||||||
|
LessonTypeText LessonType = "text"
|
||||||
|
LessonTypeQuiz LessonType = "quiz"
|
||||||
|
LessonTypeInteractive LessonType = "interactive"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Main Entities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Course represents a compliance training course
|
||||||
|
type Course struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
TenantID uuid.UUID `json:"tenant_id"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
Category CourseCategory `json:"category"`
|
||||||
|
DurationMinutes int `json:"duration_minutes"`
|
||||||
|
RequiredForRoles []string `json:"required_for_roles"` // JSONB in DB
|
||||||
|
Lessons []Lesson `json:"lessons,omitempty"`
|
||||||
|
IsActive bool `json:"is_active"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lesson represents a single lesson within a course
|
||||||
|
type Lesson struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
CourseID uuid.UUID `json:"course_id"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
LessonType LessonType `json:"lesson_type"`
|
||||||
|
ContentURL string `json:"content_url,omitempty"`
|
||||||
|
DurationMinutes int `json:"duration_minutes"`
|
||||||
|
OrderIndex int `json:"order_index"`
|
||||||
|
QuizQuestions []QuizQuestion `json:"quiz_questions,omitempty"` // JSONB in DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// QuizQuestion represents a single quiz question embedded in a lesson
|
||||||
|
type QuizQuestion struct {
|
||||||
|
Question string `json:"question"`
|
||||||
|
Options []string `json:"options"`
|
||||||
|
CorrectIndex int `json:"correct_index"`
|
||||||
|
Explanation string `json:"explanation"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enrollment represents a user's enrollment in a course
|
||||||
|
type Enrollment struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
TenantID uuid.UUID `json:"tenant_id"`
|
||||||
|
CourseID uuid.UUID `json:"course_id"`
|
||||||
|
UserID uuid.UUID `json:"user_id"`
|
||||||
|
UserName string `json:"user_name"`
|
||||||
|
UserEmail string `json:"user_email"`
|
||||||
|
Status EnrollmentStatus `json:"status"`
|
||||||
|
ProgressPercent int `json:"progress_percent"`
|
||||||
|
CurrentLessonIndex int `json:"current_lesson_index"`
|
||||||
|
StartedAt *time.Time `json:"started_at,omitempty"`
|
||||||
|
CompletedAt *time.Time `json:"completed_at,omitempty"`
|
||||||
|
Deadline *time.Time `json:"deadline,omitempty"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
UpdatedAt time.Time `json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Certificate represents a completion certificate for an enrollment
|
||||||
|
type Certificate struct {
|
||||||
|
ID uuid.UUID `json:"id"`
|
||||||
|
EnrollmentID uuid.UUID `json:"enrollment_id"`
|
||||||
|
UserName string `json:"user_name"`
|
||||||
|
CourseTitle string `json:"course_title"`
|
||||||
|
IssuedAt time.Time `json:"issued_at"`
|
||||||
|
ValidUntil *time.Time `json:"valid_until,omitempty"`
|
||||||
|
PDFURL string `json:"pdf_url,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AcademyStatistics contains aggregated academy metrics
|
||||||
|
type AcademyStatistics struct {
|
||||||
|
TotalCourses int `json:"total_courses"`
|
||||||
|
TotalEnrollments int `json:"total_enrollments"`
|
||||||
|
CompletionRate float64 `json:"completion_rate"` // 0-100
|
||||||
|
OverdueCount int `json:"overdue_count"`
|
||||||
|
AvgCompletionDays float64 `json:"avg_completion_days"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Filter Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CourseFilters defines filters for listing courses
|
||||||
|
type CourseFilters struct {
|
||||||
|
Category CourseCategory
|
||||||
|
IsActive *bool
|
||||||
|
Search string
|
||||||
|
Limit int
|
||||||
|
Offset int
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnrollmentFilters defines filters for listing enrollments
|
||||||
|
type EnrollmentFilters struct {
|
||||||
|
CourseID *uuid.UUID
|
||||||
|
UserID *uuid.UUID
|
||||||
|
Status EnrollmentStatus
|
||||||
|
Limit int
|
||||||
|
Offset int
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateCourseRequest is the API request for creating a course
|
||||||
|
type CreateCourseRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
Category CourseCategory `json:"category" binding:"required"`
|
||||||
|
DurationMinutes int `json:"duration_minutes"`
|
||||||
|
RequiredForRoles []string `json:"required_for_roles,omitempty"`
|
||||||
|
Lessons []CreateLessonRequest `json:"lessons,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateLessonRequest is the API request for creating a lesson
|
||||||
|
type CreateLessonRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
LessonType LessonType `json:"lesson_type" binding:"required"`
|
||||||
|
ContentURL string `json:"content_url,omitempty"`
|
||||||
|
DurationMinutes int `json:"duration_minutes"`
|
||||||
|
OrderIndex int `json:"order_index"`
|
||||||
|
QuizQuestions []QuizQuestion `json:"quiz_questions,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCourseRequest is the API request for updating a course
|
||||||
|
type UpdateCourseRequest struct {
|
||||||
|
Title *string `json:"title,omitempty"`
|
||||||
|
Description *string `json:"description,omitempty"`
|
||||||
|
Category *CourseCategory `json:"category,omitempty"`
|
||||||
|
DurationMinutes *int `json:"duration_minutes,omitempty"`
|
||||||
|
RequiredForRoles []string `json:"required_for_roles,omitempty"`
|
||||||
|
IsActive *bool `json:"is_active,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnrollUserRequest is the API request for enrolling a user in a course
|
||||||
|
type EnrollUserRequest struct {
|
||||||
|
CourseID uuid.UUID `json:"course_id" binding:"required"`
|
||||||
|
UserID uuid.UUID `json:"user_id" binding:"required"`
|
||||||
|
UserName string `json:"user_name" binding:"required"`
|
||||||
|
UserEmail string `json:"user_email" binding:"required"`
|
||||||
|
Deadline *time.Time `json:"deadline,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateProgressRequest is the API request for updating enrollment progress
|
||||||
|
type UpdateProgressRequest struct {
|
||||||
|
Progress int `json:"progress" binding:"required"`
|
||||||
|
CurrentLesson int `json:"current_lesson"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SubmitQuizRequest is the API request for submitting quiz answers
|
||||||
|
type SubmitQuizRequest struct {
|
||||||
|
LessonID uuid.UUID `json:"lesson_id" binding:"required"`
|
||||||
|
Answers []int `json:"answers" binding:"required"` // Index of selected answer per question
|
||||||
|
}
|
||||||
|
|
||||||
|
// SubmitQuizResponse is the API response for quiz submission
|
||||||
|
type SubmitQuizResponse struct {
|
||||||
|
Score int `json:"score"` // 0-100
|
||||||
|
Passed bool `json:"passed"`
|
||||||
|
CorrectAnswers int `json:"correct_answers"`
|
||||||
|
TotalQuestions int `json:"total_questions"`
|
||||||
|
Results []QuizResult `json:"results"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// QuizResult represents the result for a single quiz question
|
||||||
|
type QuizResult struct {
|
||||||
|
Question string `json:"question"`
|
||||||
|
Correct bool `json:"correct"`
|
||||||
|
Explanation string `json:"explanation"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CourseListResponse is the API response for listing courses
|
||||||
|
type CourseListResponse struct {
|
||||||
|
Courses []Course `json:"courses"`
|
||||||
|
Total int `json:"total"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// EnrollmentListResponse is the API response for listing enrollments
|
||||||
|
type EnrollmentListResponse struct {
|
||||||
|
Enrollments []Enrollment `json:"enrollments"`
|
||||||
|
Total int `json:"total"`
|
||||||
|
}
|
||||||
666
ai-compliance-sdk/internal/academy/store.go
Normal file
666
ai-compliance-sdk/internal/academy/store.go
Normal file
@@ -0,0 +1,666 @@
|
|||||||
|
package academy
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store handles academy data persistence
|
||||||
|
type Store struct {
|
||||||
|
pool *pgxpool.Pool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewStore creates a new academy store
|
||||||
|
func NewStore(pool *pgxpool.Pool) *Store {
|
||||||
|
return &Store{pool: pool}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Course CRUD Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateCourse creates a new course
|
||||||
|
func (s *Store) CreateCourse(ctx context.Context, course *Course) error {
|
||||||
|
course.ID = uuid.New()
|
||||||
|
course.CreatedAt = time.Now().UTC()
|
||||||
|
course.UpdatedAt = course.CreatedAt
|
||||||
|
if !course.IsActive {
|
||||||
|
course.IsActive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
requiredForRoles, _ := json.Marshal(course.RequiredForRoles)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO academy_courses (
|
||||||
|
id, tenant_id, title, description, category,
|
||||||
|
duration_minutes, required_for_roles, is_active,
|
||||||
|
created_at, updated_at
|
||||||
|
) VALUES (
|
||||||
|
$1, $2, $3, $4, $5,
|
||||||
|
$6, $7, $8,
|
||||||
|
$9, $10
|
||||||
|
)
|
||||||
|
`,
|
||||||
|
course.ID, course.TenantID, course.Title, course.Description, string(course.Category),
|
||||||
|
course.DurationMinutes, requiredForRoles, course.IsActive,
|
||||||
|
course.CreatedAt, course.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCourse retrieves a course by ID
|
||||||
|
func (s *Store) GetCourse(ctx context.Context, id uuid.UUID) (*Course, error) {
|
||||||
|
var course Course
|
||||||
|
var category string
|
||||||
|
var requiredForRoles []byte
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, title, description, category,
|
||||||
|
duration_minutes, required_for_roles, is_active,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM academy_courses WHERE id = $1
|
||||||
|
`, id).Scan(
|
||||||
|
&course.ID, &course.TenantID, &course.Title, &course.Description, &category,
|
||||||
|
&course.DurationMinutes, &requiredForRoles, &course.IsActive,
|
||||||
|
&course.CreatedAt, &course.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
course.Category = CourseCategory(category)
|
||||||
|
json.Unmarshal(requiredForRoles, &course.RequiredForRoles)
|
||||||
|
if course.RequiredForRoles == nil {
|
||||||
|
course.RequiredForRoles = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load lessons for this course
|
||||||
|
lessons, err := s.ListLessons(ctx, course.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
course.Lessons = lessons
|
||||||
|
|
||||||
|
return &course, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCourses lists courses for a tenant with optional filters
|
||||||
|
func (s *Store) ListCourses(ctx context.Context, tenantID uuid.UUID, filters *CourseFilters) ([]Course, int, error) {
|
||||||
|
// Count query
|
||||||
|
countQuery := "SELECT COUNT(*) FROM academy_courses WHERE tenant_id = $1"
|
||||||
|
countArgs := []interface{}{tenantID}
|
||||||
|
countArgIdx := 2
|
||||||
|
|
||||||
|
// List query
|
||||||
|
query := `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, title, description, category,
|
||||||
|
duration_minutes, required_for_roles, is_active,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM academy_courses WHERE tenant_id = $1`
|
||||||
|
|
||||||
|
args := []interface{}{tenantID}
|
||||||
|
argIdx := 2
|
||||||
|
|
||||||
|
if filters != nil {
|
||||||
|
if filters.Category != "" {
|
||||||
|
query += fmt.Sprintf(" AND category = $%d", argIdx)
|
||||||
|
args = append(args, string(filters.Category))
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND category = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, string(filters.Category))
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.IsActive != nil {
|
||||||
|
query += fmt.Sprintf(" AND is_active = $%d", argIdx)
|
||||||
|
args = append(args, *filters.IsActive)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND is_active = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, *filters.IsActive)
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.Search != "" {
|
||||||
|
query += fmt.Sprintf(" AND (title ILIKE $%d OR description ILIKE $%d)", argIdx, argIdx)
|
||||||
|
args = append(args, "%"+filters.Search+"%")
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND (title ILIKE $%d OR description ILIKE $%d)", countArgIdx, countArgIdx)
|
||||||
|
countArgs = append(countArgs, "%"+filters.Search+"%")
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get total count
|
||||||
|
var total int
|
||||||
|
err := s.pool.QueryRow(ctx, countQuery, countArgs...).Scan(&total)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
query += " ORDER BY created_at DESC"
|
||||||
|
|
||||||
|
if filters != nil && filters.Limit > 0 {
|
||||||
|
query += fmt.Sprintf(" LIMIT $%d", argIdx)
|
||||||
|
args = append(args, filters.Limit)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
if filters.Offset > 0 {
|
||||||
|
query += fmt.Sprintf(" OFFSET $%d", argIdx)
|
||||||
|
args = append(args, filters.Offset)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var courses []Course
|
||||||
|
for rows.Next() {
|
||||||
|
var course Course
|
||||||
|
var category string
|
||||||
|
var requiredForRoles []byte
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&course.ID, &course.TenantID, &course.Title, &course.Description, &category,
|
||||||
|
&course.DurationMinutes, &requiredForRoles, &course.IsActive,
|
||||||
|
&course.CreatedAt, &course.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
course.Category = CourseCategory(category)
|
||||||
|
json.Unmarshal(requiredForRoles, &course.RequiredForRoles)
|
||||||
|
if course.RequiredForRoles == nil {
|
||||||
|
course.RequiredForRoles = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
courses = append(courses, course)
|
||||||
|
}
|
||||||
|
|
||||||
|
if courses == nil {
|
||||||
|
courses = []Course{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return courses, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCourse updates a course
|
||||||
|
func (s *Store) UpdateCourse(ctx context.Context, course *Course) error {
|
||||||
|
course.UpdatedAt = time.Now().UTC()
|
||||||
|
|
||||||
|
requiredForRoles, _ := json.Marshal(course.RequiredForRoles)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE academy_courses SET
|
||||||
|
title = $2, description = $3, category = $4,
|
||||||
|
duration_minutes = $5, required_for_roles = $6, is_active = $7,
|
||||||
|
updated_at = $8
|
||||||
|
WHERE id = $1
|
||||||
|
`,
|
||||||
|
course.ID, course.Title, course.Description, string(course.Category),
|
||||||
|
course.DurationMinutes, requiredForRoles, course.IsActive,
|
||||||
|
course.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteCourse deletes a course and its related data (via CASCADE)
|
||||||
|
func (s *Store) DeleteCourse(ctx context.Context, id uuid.UUID) error {
|
||||||
|
_, err := s.pool.Exec(ctx, "DELETE FROM academy_courses WHERE id = $1", id)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Lesson Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateLesson creates a new lesson.
//
// A fresh UUID is assigned to lesson.ID as a side effect. Quiz questions
// are stored as a JSON column; marshaling a slice of structs cannot fail,
// so that error is deliberately ignored.
func (s *Store) CreateLesson(ctx context.Context, lesson *Lesson) error {
	lesson.ID = uuid.New()

	quizQuestions, _ := json.Marshal(lesson.QuizQuestions)

	_, err := s.pool.Exec(ctx, `
		INSERT INTO academy_lessons (
			id, course_id, title, description, lesson_type,
			content_url, duration_minutes, order_index, quiz_questions
		) VALUES (
			$1, $2, $3, $4, $5,
			$6, $7, $8, $9
		)
	`,
		lesson.ID, lesson.CourseID, lesson.Title, lesson.Description, string(lesson.LessonType),
		lesson.ContentURL, lesson.DurationMinutes, lesson.OrderIndex, quizQuestions,
	)

	return err
}
|
||||||
|
|
||||||
|
// ListLessons lists lessons for a course ordered by order_index
|
||||||
|
func (s *Store) ListLessons(ctx context.Context, courseID uuid.UUID) ([]Lesson, error) {
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, course_id, title, description, lesson_type,
|
||||||
|
content_url, duration_minutes, order_index, quiz_questions
|
||||||
|
FROM academy_lessons WHERE course_id = $1
|
||||||
|
ORDER BY order_index ASC
|
||||||
|
`, courseID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var lessons []Lesson
|
||||||
|
for rows.Next() {
|
||||||
|
var lesson Lesson
|
||||||
|
var lessonType string
|
||||||
|
var quizQuestions []byte
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&lesson.ID, &lesson.CourseID, &lesson.Title, &lesson.Description, &lessonType,
|
||||||
|
&lesson.ContentURL, &lesson.DurationMinutes, &lesson.OrderIndex, &quizQuestions,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
lesson.LessonType = LessonType(lessonType)
|
||||||
|
json.Unmarshal(quizQuestions, &lesson.QuizQuestions)
|
||||||
|
if lesson.QuizQuestions == nil {
|
||||||
|
lesson.QuizQuestions = []QuizQuestion{}
|
||||||
|
}
|
||||||
|
|
||||||
|
lessons = append(lessons, lesson)
|
||||||
|
}
|
||||||
|
|
||||||
|
if lessons == nil {
|
||||||
|
lessons = []Lesson{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return lessons, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetLesson retrieves a single lesson by ID.
//
// Returns (nil, nil) when no lesson with the given ID exists.
func (s *Store) GetLesson(ctx context.Context, id uuid.UUID) (*Lesson, error) {
	var lesson Lesson
	var lessonType string    // raw text column, converted to LessonType below
	var quizQuestions []byte // JSON-encoded []QuizQuestion column

	err := s.pool.QueryRow(ctx, `
		SELECT
			id, course_id, title, description, lesson_type,
			content_url, duration_minutes, order_index, quiz_questions
		FROM academy_lessons WHERE id = $1
	`, id).Scan(
		&lesson.ID, &lesson.CourseID, &lesson.Title, &lesson.Description, &lessonType,
		&lesson.ContentURL, &lesson.DurationMinutes, &lesson.OrderIndex, &quizQuestions,
	)

	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	lesson.LessonType = LessonType(lessonType)
	// Best-effort decode; nil is normalized to an empty slice below.
	json.Unmarshal(quizQuestions, &lesson.QuizQuestions)
	if lesson.QuizQuestions == nil {
		lesson.QuizQuestions = []QuizQuestion{}
	}

	return &lesson, nil
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Enrollment Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateEnrollment creates a new enrollment.
//
// Side effects on the passed struct: a fresh UUID is assigned, CreatedAt
// and UpdatedAt are stamped with the same UTC instant, and an empty
// Status defaults to EnrollmentStatusNotStarted.
func (s *Store) CreateEnrollment(ctx context.Context, enrollment *Enrollment) error {
	enrollment.ID = uuid.New()
	enrollment.CreatedAt = time.Now().UTC()
	enrollment.UpdatedAt = enrollment.CreatedAt
	if enrollment.Status == "" {
		enrollment.Status = EnrollmentStatusNotStarted
	}

	_, err := s.pool.Exec(ctx, `
		INSERT INTO academy_enrollments (
			id, tenant_id, course_id, user_id, user_name, user_email,
			status, progress_percent, current_lesson_index,
			started_at, completed_at, deadline,
			created_at, updated_at
		) VALUES (
			$1, $2, $3, $4, $5, $6,
			$7, $8, $9,
			$10, $11, $12,
			$13, $14
		)
	`,
		enrollment.ID, enrollment.TenantID, enrollment.CourseID, enrollment.UserID, enrollment.UserName, enrollment.UserEmail,
		string(enrollment.Status), enrollment.ProgressPercent, enrollment.CurrentLessonIndex,
		enrollment.StartedAt, enrollment.CompletedAt, enrollment.Deadline,
		enrollment.CreatedAt, enrollment.UpdatedAt,
	)

	return err
}
|
||||||
|
|
||||||
|
// GetEnrollment retrieves an enrollment by ID.
//
// Returns (nil, nil) when no enrollment with the given ID exists.
//
// NOTE(review): like GetCourse, this lookup is not tenant-scoped — callers
// must check enrollment.TenantID before acting on the result.
func (s *Store) GetEnrollment(ctx context.Context, id uuid.UUID) (*Enrollment, error) {
	var enrollment Enrollment
	var status string // raw text column, converted to EnrollmentStatus below

	err := s.pool.QueryRow(ctx, `
		SELECT
			id, tenant_id, course_id, user_id, user_name, user_email,
			status, progress_percent, current_lesson_index,
			started_at, completed_at, deadline,
			created_at, updated_at
		FROM academy_enrollments WHERE id = $1
	`, id).Scan(
		&enrollment.ID, &enrollment.TenantID, &enrollment.CourseID, &enrollment.UserID, &enrollment.UserName, &enrollment.UserEmail,
		&status, &enrollment.ProgressPercent, &enrollment.CurrentLessonIndex,
		&enrollment.StartedAt, &enrollment.CompletedAt, &enrollment.Deadline,
		&enrollment.CreatedAt, &enrollment.UpdatedAt,
	)

	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	enrollment.Status = EnrollmentStatus(status)
	return &enrollment, nil
}
|
||||||
|
|
||||||
|
// ListEnrollments lists enrollments for a tenant with optional filters
|
||||||
|
func (s *Store) ListEnrollments(ctx context.Context, tenantID uuid.UUID, filters *EnrollmentFilters) ([]Enrollment, int, error) {
|
||||||
|
// Count query
|
||||||
|
countQuery := "SELECT COUNT(*) FROM academy_enrollments WHERE tenant_id = $1"
|
||||||
|
countArgs := []interface{}{tenantID}
|
||||||
|
countArgIdx := 2
|
||||||
|
|
||||||
|
// List query
|
||||||
|
query := `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, course_id, user_id, user_name, user_email,
|
||||||
|
status, progress_percent, current_lesson_index,
|
||||||
|
started_at, completed_at, deadline,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM academy_enrollments WHERE tenant_id = $1`
|
||||||
|
|
||||||
|
args := []interface{}{tenantID}
|
||||||
|
argIdx := 2
|
||||||
|
|
||||||
|
if filters != nil {
|
||||||
|
if filters.CourseID != nil {
|
||||||
|
query += fmt.Sprintf(" AND course_id = $%d", argIdx)
|
||||||
|
args = append(args, *filters.CourseID)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND course_id = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, *filters.CourseID)
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.UserID != nil {
|
||||||
|
query += fmt.Sprintf(" AND user_id = $%d", argIdx)
|
||||||
|
args = append(args, *filters.UserID)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND user_id = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, *filters.UserID)
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.Status != "" {
|
||||||
|
query += fmt.Sprintf(" AND status = $%d", argIdx)
|
||||||
|
args = append(args, string(filters.Status))
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
countQuery += fmt.Sprintf(" AND status = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, string(filters.Status))
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get total count
|
||||||
|
var total int
|
||||||
|
err := s.pool.QueryRow(ctx, countQuery, countArgs...).Scan(&total)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
query += " ORDER BY created_at DESC"
|
||||||
|
|
||||||
|
if filters != nil && filters.Limit > 0 {
|
||||||
|
query += fmt.Sprintf(" LIMIT $%d", argIdx)
|
||||||
|
args = append(args, filters.Limit)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
if filters.Offset > 0 {
|
||||||
|
query += fmt.Sprintf(" OFFSET $%d", argIdx)
|
||||||
|
args = append(args, filters.Offset)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var enrollments []Enrollment
|
||||||
|
for rows.Next() {
|
||||||
|
var enrollment Enrollment
|
||||||
|
var status string
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&enrollment.ID, &enrollment.TenantID, &enrollment.CourseID, &enrollment.UserID, &enrollment.UserName, &enrollment.UserEmail,
|
||||||
|
&status, &enrollment.ProgressPercent, &enrollment.CurrentLessonIndex,
|
||||||
|
&enrollment.StartedAt, &enrollment.CompletedAt, &enrollment.Deadline,
|
||||||
|
&enrollment.CreatedAt, &enrollment.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollment.Status = EnrollmentStatus(status)
|
||||||
|
enrollments = append(enrollments, enrollment)
|
||||||
|
}
|
||||||
|
|
||||||
|
if enrollments == nil {
|
||||||
|
enrollments = []Enrollment{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return enrollments, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateEnrollmentProgress updates the progress for an enrollment.
//
// All derived state transitions are computed in SQL from the new progress
// value ($2):
//   - status becomes 'completed' at >= 100, 'in_progress' at > 0, and is
//     otherwise left unchanged;
//   - started_at is stamped once, the first time progress goes above 0;
//   - completed_at is stamped whenever progress reaches 100.
//
// The update is a no-op (no error) if the enrollment ID does not exist.
func (s *Store) UpdateEnrollmentProgress(ctx context.Context, id uuid.UUID, progress int, currentLesson int) error {
	now := time.Now().UTC()

	// If progress > 0, set started_at if not already set and update status to in_progress
	_, err := s.pool.Exec(ctx, `
		UPDATE academy_enrollments SET
			progress_percent = $2,
			current_lesson_index = $3,
			status = CASE
				WHEN $2 >= 100 THEN 'completed'
				WHEN $2 > 0 THEN 'in_progress'
				ELSE status
			END,
			started_at = CASE
				WHEN started_at IS NULL AND $2 > 0 THEN $4
				ELSE started_at
			END,
			completed_at = CASE
				WHEN $2 >= 100 THEN $4
				ELSE completed_at
			END,
			updated_at = $4
		WHERE id = $1
	`, id, progress, currentLesson, now)

	return err
}
|
||||||
|
|
||||||
|
// CompleteEnrollment marks an enrollment as completed
|
||||||
|
func (s *Store) CompleteEnrollment(ctx context.Context, id uuid.UUID) error {
|
||||||
|
now := time.Now().UTC()
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE academy_enrollments SET
|
||||||
|
status = 'completed',
|
||||||
|
progress_percent = 100,
|
||||||
|
completed_at = $2,
|
||||||
|
updated_at = $2
|
||||||
|
WHERE id = $1
|
||||||
|
`, id, now)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Certificate Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetCertificate retrieves a certificate by ID.
//
// Returns (nil, nil) when no certificate with the given ID exists.
func (s *Store) GetCertificate(ctx context.Context, id uuid.UUID) (*Certificate, error) {
	var cert Certificate

	err := s.pool.QueryRow(ctx, `
		SELECT
			id, enrollment_id, user_name, course_title,
			issued_at, valid_until, pdf_url
		FROM academy_certificates WHERE id = $1
	`, id).Scan(
		&cert.ID, &cert.EnrollmentID, &cert.UserName, &cert.CourseTitle,
		&cert.IssuedAt, &cert.ValidUntil, &cert.PDFURL,
	)

	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &cert, nil
}
|
||||||
|
|
||||||
|
// GetCertificateByEnrollment retrieves a certificate by enrollment ID.
//
// Returns (nil, nil) when the enrollment has no certificate yet.
func (s *Store) GetCertificateByEnrollment(ctx context.Context, enrollmentID uuid.UUID) (*Certificate, error) {
	var cert Certificate

	err := s.pool.QueryRow(ctx, `
		SELECT
			id, enrollment_id, user_name, course_title,
			issued_at, valid_until, pdf_url
		FROM academy_certificates WHERE enrollment_id = $1
	`, enrollmentID).Scan(
		&cert.ID, &cert.EnrollmentID, &cert.UserName, &cert.CourseTitle,
		&cert.IssuedAt, &cert.ValidUntil, &cert.PDFURL,
	)

	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	return &cert, nil
}
|
||||||
|
|
||||||
|
// CreateCertificate creates a new certificate
|
||||||
|
func (s *Store) CreateCertificate(ctx context.Context, cert *Certificate) error {
|
||||||
|
cert.ID = uuid.New()
|
||||||
|
cert.IssuedAt = time.Now().UTC()
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO academy_certificates (
|
||||||
|
id, enrollment_id, user_name, course_title,
|
||||||
|
issued_at, valid_until, pdf_url
|
||||||
|
) VALUES (
|
||||||
|
$1, $2, $3, $4,
|
||||||
|
$5, $6, $7
|
||||||
|
)
|
||||||
|
`,
|
||||||
|
cert.ID, cert.EnrollmentID, cert.UserName, cert.CourseTitle,
|
||||||
|
cert.IssuedAt, cert.ValidUntil, cert.PDFURL,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns aggregated academy statistics for a tenant
|
||||||
|
func (s *Store) GetStatistics(ctx context.Context, tenantID uuid.UUID) (*AcademyStatistics, error) {
|
||||||
|
stats := &AcademyStatistics{}
|
||||||
|
|
||||||
|
// Total active courses
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
"SELECT COUNT(*) FROM academy_courses WHERE tenant_id = $1 AND is_active = true",
|
||||||
|
tenantID).Scan(&stats.TotalCourses)
|
||||||
|
|
||||||
|
// Total enrollments
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
"SELECT COUNT(*) FROM academy_enrollments WHERE tenant_id = $1",
|
||||||
|
tenantID).Scan(&stats.TotalEnrollments)
|
||||||
|
|
||||||
|
// Completion rate
|
||||||
|
if stats.TotalEnrollments > 0 {
|
||||||
|
var completed int
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
"SELECT COUNT(*) FROM academy_enrollments WHERE tenant_id = $1 AND status = 'completed'",
|
||||||
|
tenantID).Scan(&completed)
|
||||||
|
stats.CompletionRate = float64(completed) / float64(stats.TotalEnrollments) * 100
|
||||||
|
}
|
||||||
|
|
||||||
|
// Overdue count (past deadline, not completed)
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
`SELECT COUNT(*) FROM academy_enrollments
|
||||||
|
WHERE tenant_id = $1
|
||||||
|
AND status NOT IN ('completed', 'expired')
|
||||||
|
AND deadline IS NOT NULL
|
||||||
|
AND deadline < NOW()`,
|
||||||
|
tenantID).Scan(&stats.OverdueCount)
|
||||||
|
|
||||||
|
// Average completion days
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
`SELECT COALESCE(AVG(EXTRACT(EPOCH FROM (completed_at - started_at)) / 86400), 0)
|
||||||
|
FROM academy_enrollments
|
||||||
|
WHERE tenant_id = $1
|
||||||
|
AND status = 'completed'
|
||||||
|
AND started_at IS NOT NULL
|
||||||
|
AND completed_at IS NOT NULL`,
|
||||||
|
tenantID).Scan(&stats.AvgCompletionDays)
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
587
ai-compliance-sdk/internal/api/handlers/academy_handlers.go
Normal file
587
ai-compliance-sdk/internal/api/handlers/academy_handlers.go
Normal file
@@ -0,0 +1,587 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/academy"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AcademyHandlers handles academy HTTP requests (courses, lessons,
// enrollments and certificates) on top of the academy store.
type AcademyHandlers struct {
	store *academy.Store // persistence layer for all academy entities
}
|
||||||
|
|
||||||
|
// NewAcademyHandlers creates new academy handlers
|
||||||
|
func NewAcademyHandlers(store *academy.Store) *AcademyHandlers {
|
||||||
|
return &AcademyHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Course Management
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateCourse creates a new compliance training course
|
||||||
|
// POST /sdk/v1/academy/courses
|
||||||
|
func (h *AcademyHandlers) CreateCourse(c *gin.Context) {
|
||||||
|
var req academy.CreateCourseRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
course := &academy.Course{
|
||||||
|
TenantID: tenantID,
|
||||||
|
Title: req.Title,
|
||||||
|
Description: req.Description,
|
||||||
|
Category: req.Category,
|
||||||
|
DurationMinutes: req.DurationMinutes,
|
||||||
|
RequiredForRoles: req.RequiredForRoles,
|
||||||
|
IsActive: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
if course.RequiredForRoles == nil {
|
||||||
|
course.RequiredForRoles = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateCourse(c.Request.Context(), course); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create lessons if provided
|
||||||
|
for i := range req.Lessons {
|
||||||
|
lesson := &academy.Lesson{
|
||||||
|
CourseID: course.ID,
|
||||||
|
Title: req.Lessons[i].Title,
|
||||||
|
Description: req.Lessons[i].Description,
|
||||||
|
LessonType: req.Lessons[i].LessonType,
|
||||||
|
ContentURL: req.Lessons[i].ContentURL,
|
||||||
|
DurationMinutes: req.Lessons[i].DurationMinutes,
|
||||||
|
OrderIndex: req.Lessons[i].OrderIndex,
|
||||||
|
QuizQuestions: req.Lessons[i].QuizQuestions,
|
||||||
|
}
|
||||||
|
if err := h.store.CreateLesson(c.Request.Context(), lesson); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
course.Lessons = append(course.Lessons, *lesson)
|
||||||
|
}
|
||||||
|
|
||||||
|
if course.Lessons == nil {
|
||||||
|
course.Lessons = []academy.Lesson{}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"course": course})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCourse retrieves a course with its lessons
|
||||||
|
// GET /sdk/v1/academy/courses/:id
|
||||||
|
func (h *AcademyHandlers) GetCourse(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid course ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
course, err := h.store.GetCourse(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if course == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "course not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"course": course})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCourses lists courses for the current tenant
|
||||||
|
// GET /sdk/v1/academy/courses
|
||||||
|
func (h *AcademyHandlers) ListCourses(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
filters := &academy.CourseFilters{
|
||||||
|
Limit: 50,
|
||||||
|
}
|
||||||
|
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters.Category = academy.CourseCategory(category)
|
||||||
|
}
|
||||||
|
if search := c.Query("search"); search != "" {
|
||||||
|
filters.Search = search
|
||||||
|
}
|
||||||
|
if activeStr := c.Query("is_active"); activeStr != "" {
|
||||||
|
active := activeStr == "true"
|
||||||
|
filters.IsActive = &active
|
||||||
|
}
|
||||||
|
if limitStr := c.Query("limit"); limitStr != "" {
|
||||||
|
if limit, err := strconv.Atoi(limitStr); err == nil && limit > 0 {
|
||||||
|
filters.Limit = limit
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if offsetStr := c.Query("offset"); offsetStr != "" {
|
||||||
|
if offset, err := strconv.Atoi(offsetStr); err == nil && offset >= 0 {
|
||||||
|
filters.Offset = offset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
courses, total, err := h.store.ListCourses(c.Request.Context(), tenantID, filters)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, academy.CourseListResponse{
|
||||||
|
Courses: courses,
|
||||||
|
Total: total,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCourse updates a course
|
||||||
|
// PUT /sdk/v1/academy/courses/:id
|
||||||
|
func (h *AcademyHandlers) UpdateCourse(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid course ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
course, err := h.store.GetCourse(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if course == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "course not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req academy.UpdateCourseRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Title != nil {
|
||||||
|
course.Title = *req.Title
|
||||||
|
}
|
||||||
|
if req.Description != nil {
|
||||||
|
course.Description = *req.Description
|
||||||
|
}
|
||||||
|
if req.Category != nil {
|
||||||
|
course.Category = *req.Category
|
||||||
|
}
|
||||||
|
if req.DurationMinutes != nil {
|
||||||
|
course.DurationMinutes = *req.DurationMinutes
|
||||||
|
}
|
||||||
|
if req.RequiredForRoles != nil {
|
||||||
|
course.RequiredForRoles = req.RequiredForRoles
|
||||||
|
}
|
||||||
|
if req.IsActive != nil {
|
||||||
|
course.IsActive = *req.IsActive
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.UpdateCourse(c.Request.Context(), course); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"course": course})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteCourse deletes a course
|
||||||
|
// DELETE /sdk/v1/academy/courses/:id
|
||||||
|
func (h *AcademyHandlers) DeleteCourse(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid course ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.DeleteCourse(c.Request.Context(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "course deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Enrollment Management
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateEnrollment enrolls a user in a course
|
||||||
|
// POST /sdk/v1/academy/enrollments
|
||||||
|
func (h *AcademyHandlers) CreateEnrollment(c *gin.Context) {
|
||||||
|
var req academy.EnrollUserRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
// Verify course exists
|
||||||
|
course, err := h.store.GetCourse(c.Request.Context(), req.CourseID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if course == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "course not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollment := &academy.Enrollment{
|
||||||
|
TenantID: tenantID,
|
||||||
|
CourseID: req.CourseID,
|
||||||
|
UserID: req.UserID,
|
||||||
|
UserName: req.UserName,
|
||||||
|
UserEmail: req.UserEmail,
|
||||||
|
Status: academy.EnrollmentStatusNotStarted,
|
||||||
|
Deadline: req.Deadline,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateEnrollment(c.Request.Context(), enrollment); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"enrollment": enrollment})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListEnrollments lists enrollments for the current tenant
|
||||||
|
// GET /sdk/v1/academy/enrollments
|
||||||
|
func (h *AcademyHandlers) ListEnrollments(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
filters := &academy.EnrollmentFilters{
|
||||||
|
Limit: 50,
|
||||||
|
}
|
||||||
|
|
||||||
|
if status := c.Query("status"); status != "" {
|
||||||
|
filters.Status = academy.EnrollmentStatus(status)
|
||||||
|
}
|
||||||
|
if courseIDStr := c.Query("course_id"); courseIDStr != "" {
|
||||||
|
if courseID, err := uuid.Parse(courseIDStr); err == nil {
|
||||||
|
filters.CourseID = &courseID
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if userIDStr := c.Query("user_id"); userIDStr != "" {
|
||||||
|
if userID, err := uuid.Parse(userIDStr); err == nil {
|
||||||
|
filters.UserID = &userID
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if limitStr := c.Query("limit"); limitStr != "" {
|
||||||
|
if limit, err := strconv.Atoi(limitStr); err == nil && limit > 0 {
|
||||||
|
filters.Limit = limit
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if offsetStr := c.Query("offset"); offsetStr != "" {
|
||||||
|
if offset, err := strconv.Atoi(offsetStr); err == nil && offset >= 0 {
|
||||||
|
filters.Offset = offset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollments, total, err := h.store.ListEnrollments(c.Request.Context(), tenantID, filters)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, academy.EnrollmentListResponse{
|
||||||
|
Enrollments: enrollments,
|
||||||
|
Total: total,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateProgress updates an enrollment's progress
|
||||||
|
// PUT /sdk/v1/academy/enrollments/:id/progress
|
||||||
|
func (h *AcademyHandlers) UpdateProgress(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid enrollment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollment, err := h.store.GetEnrollment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if enrollment == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "enrollment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req academy.UpdateProgressRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Progress < 0 || req.Progress > 100 {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "progress must be between 0 and 100"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.UpdateEnrollmentProgress(c.Request.Context(), id, req.Progress, req.CurrentLesson); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch updated enrollment
|
||||||
|
updated, err := h.store.GetEnrollment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"enrollment": updated})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompleteEnrollment marks an enrollment as completed
|
||||||
|
// POST /sdk/v1/academy/enrollments/:id/complete
|
||||||
|
func (h *AcademyHandlers) CompleteEnrollment(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid enrollment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollment, err := h.store.GetEnrollment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if enrollment == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "enrollment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if enrollment.Status == academy.EnrollmentStatusCompleted {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "enrollment already completed"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CompleteEnrollment(c.Request.Context(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch updated enrollment
|
||||||
|
updated, err := h.store.GetEnrollment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"enrollment": updated,
|
||||||
|
"message": "enrollment completed",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Certificate Management
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetCertificate retrieves a certificate
|
||||||
|
// GET /sdk/v1/academy/certificates/:id
|
||||||
|
func (h *AcademyHandlers) GetCertificate(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid certificate ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
cert, err := h.store.GetCertificate(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if cert == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "certificate not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"certificate": cert})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateCertificate generates a certificate for a completed enrollment
|
||||||
|
// POST /sdk/v1/academy/enrollments/:id/certificate
|
||||||
|
func (h *AcademyHandlers) GenerateCertificate(c *gin.Context) {
|
||||||
|
enrollmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid enrollment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
enrollment, err := h.store.GetEnrollment(c.Request.Context(), enrollmentID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if enrollment == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "enrollment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if enrollment.Status != academy.EnrollmentStatusCompleted {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "enrollment must be completed before generating certificate"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if certificate already exists
|
||||||
|
existing, err := h.store.GetCertificateByEnrollment(c.Request.Context(), enrollmentID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if existing != nil {
|
||||||
|
c.JSON(http.StatusOK, gin.H{"certificate": existing, "message": "certificate already exists"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the course for the certificate title
|
||||||
|
course, err := h.store.GetCourse(c.Request.Context(), enrollment.CourseID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
courseTitle := "Unknown Course"
|
||||||
|
if course != nil {
|
||||||
|
courseTitle = course.Title
|
||||||
|
}
|
||||||
|
|
||||||
|
// Certificate is valid for 1 year by default
|
||||||
|
validUntil := time.Now().UTC().AddDate(1, 0, 0)
|
||||||
|
|
||||||
|
cert := &academy.Certificate{
|
||||||
|
EnrollmentID: enrollmentID,
|
||||||
|
UserName: enrollment.UserName,
|
||||||
|
CourseTitle: courseTitle,
|
||||||
|
ValidUntil: &validUntil,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateCertificate(c.Request.Context(), cert); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"certificate": cert})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Quiz Submission
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SubmitQuiz submits quiz answers and returns the results
|
||||||
|
// POST /sdk/v1/academy/enrollments/:id/quiz
|
||||||
|
func (h *AcademyHandlers) SubmitQuiz(c *gin.Context) {
|
||||||
|
enrollmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid enrollment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req academy.SubmitQuizRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify enrollment exists
|
||||||
|
enrollment, err := h.store.GetEnrollment(c.Request.Context(), enrollmentID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if enrollment == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "enrollment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the lesson with quiz questions
|
||||||
|
lesson, err := h.store.GetLesson(c.Request.Context(), req.LessonID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if lesson == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "lesson not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(lesson.QuizQuestions) == 0 {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "lesson has no quiz questions"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(req.Answers) != len(lesson.QuizQuestions) {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "number of answers must match number of questions"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Grade the quiz
|
||||||
|
correctCount := 0
|
||||||
|
var results []academy.QuizResult
|
||||||
|
|
||||||
|
for i, question := range lesson.QuizQuestions {
|
||||||
|
correct := req.Answers[i] == question.CorrectIndex
|
||||||
|
if correct {
|
||||||
|
correctCount++
|
||||||
|
}
|
||||||
|
results = append(results, academy.QuizResult{
|
||||||
|
Question: question.Question,
|
||||||
|
Correct: correct,
|
||||||
|
Explanation: question.Explanation,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
totalQuestions := len(lesson.QuizQuestions)
|
||||||
|
score := 0
|
||||||
|
if totalQuestions > 0 {
|
||||||
|
score = (correctCount * 100) / totalQuestions
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pass threshold: 70%
|
||||||
|
passed := score >= 70
|
||||||
|
|
||||||
|
response := academy.SubmitQuizResponse{
|
||||||
|
Score: score,
|
||||||
|
Passed: passed,
|
||||||
|
CorrectAnswers: correctCount,
|
||||||
|
TotalQuestions: totalQuestions,
|
||||||
|
Results: results,
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, response)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns academy statistics for the current tenant
|
||||||
|
// GET /sdk/v1/academy/statistics
|
||||||
|
func (h *AcademyHandlers) GetStatistics(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
stats, err := h.store.GetStatistics(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, stats)
|
||||||
|
}
|
||||||
451
ai-compliance-sdk/internal/api/handlers/dsb_handlers.go
Normal file
451
ai-compliance-sdk/internal/api/handlers/dsb_handlers.go
Normal file
@@ -0,0 +1,451 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/dsb"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DSBHandlers handles DSB-as-a-Service portal HTTP requests.
// All endpoints delegate persistence to the underlying dsb.Store.
type DSBHandlers struct {
	store *dsb.Store // backing store for assignments, hours, tasks and communications
}
|
||||||
|
|
||||||
|
// NewDSBHandlers creates new DSB handlers.
|
||||||
|
func NewDSBHandlers(store *dsb.Store) *DSBHandlers {
|
||||||
|
return &DSBHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getDSBUserID extracts and parses the X-User-ID header as UUID.
|
||||||
|
func getDSBUserID(c *gin.Context) (uuid.UUID, bool) {
|
||||||
|
userIDStr := c.GetHeader("X-User-ID")
|
||||||
|
if userIDStr == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "X-User-ID header is required"})
|
||||||
|
return uuid.Nil, false
|
||||||
|
}
|
||||||
|
userID, err := uuid.Parse(userIDStr)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid X-User-ID header: must be a valid UUID"})
|
||||||
|
return uuid.Nil, false
|
||||||
|
}
|
||||||
|
return userID, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Dashboard
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetDashboard returns the aggregated DSB dashboard.
|
||||||
|
// GET /sdk/v1/dsb/dashboard
|
||||||
|
func (h *DSBHandlers) GetDashboard(c *gin.Context) {
|
||||||
|
dsbUserID, ok := getDSBUserID(c)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
dashboard, err := h.store.GetDashboard(c.Request.Context(), dsbUserID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, dashboard)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Assignments
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateAssignment creates a new DSB-to-tenant assignment.
|
||||||
|
// POST /sdk/v1/dsb/assignments
|
||||||
|
func (h *DSBHandlers) CreateAssignment(c *gin.Context) {
|
||||||
|
var req dsb.CreateAssignmentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assignment := &dsb.Assignment{
|
||||||
|
DSBUserID: req.DSBUserID,
|
||||||
|
TenantID: req.TenantID,
|
||||||
|
Status: req.Status,
|
||||||
|
ContractStart: req.ContractStart,
|
||||||
|
ContractEnd: req.ContractEnd,
|
||||||
|
MonthlyHoursBudget: req.MonthlyHoursBudget,
|
||||||
|
Notes: req.Notes,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateAssignment(c.Request.Context(), assignment); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"assignment": assignment})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListAssignments returns all assignments for the authenticated DSB user.
|
||||||
|
// GET /sdk/v1/dsb/assignments
|
||||||
|
func (h *DSBHandlers) ListAssignments(c *gin.Context) {
|
||||||
|
dsbUserID, ok := getDSBUserID(c)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assignments, err := h.store.ListAssignments(c.Request.Context(), dsbUserID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"assignments": assignments,
|
||||||
|
"total": len(assignments),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAssignment retrieves a single assignment by ID.
|
||||||
|
// GET /sdk/v1/dsb/assignments/:id
|
||||||
|
func (h *DSBHandlers) GetAssignment(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assignment, err := h.store.GetAssignment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "assignment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"assignment": assignment})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateAssignment updates an existing assignment.
|
||||||
|
// PUT /sdk/v1/dsb/assignments/:id
|
||||||
|
func (h *DSBHandlers) UpdateAssignment(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assignment, err := h.store.GetAssignment(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "assignment not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req dsb.UpdateAssignmentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply non-nil fields
|
||||||
|
if req.Status != nil {
|
||||||
|
assignment.Status = *req.Status
|
||||||
|
}
|
||||||
|
if req.ContractEnd != nil {
|
||||||
|
assignment.ContractEnd = req.ContractEnd
|
||||||
|
}
|
||||||
|
if req.MonthlyHoursBudget != nil {
|
||||||
|
assignment.MonthlyHoursBudget = *req.MonthlyHoursBudget
|
||||||
|
}
|
||||||
|
if req.Notes != nil {
|
||||||
|
assignment.Notes = *req.Notes
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.UpdateAssignment(c.Request.Context(), assignment); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"assignment": assignment})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Hours
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateHourEntry creates a new time tracking entry for an assignment.
|
||||||
|
// POST /sdk/v1/dsb/assignments/:id/hours
|
||||||
|
func (h *DSBHandlers) CreateHourEntry(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req dsb.CreateHourEntryRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
billable := true
|
||||||
|
if req.Billable != nil {
|
||||||
|
billable = *req.Billable
|
||||||
|
}
|
||||||
|
|
||||||
|
entry := &dsb.HourEntry{
|
||||||
|
AssignmentID: assignmentID,
|
||||||
|
Date: req.Date,
|
||||||
|
Hours: req.Hours,
|
||||||
|
Category: req.Category,
|
||||||
|
Description: req.Description,
|
||||||
|
Billable: billable,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateHourEntry(c.Request.Context(), entry); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"hour_entry": entry})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListHours returns time entries for an assignment.
|
||||||
|
// GET /sdk/v1/dsb/assignments/:id/hours?month=YYYY-MM
|
||||||
|
func (h *DSBHandlers) ListHours(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
month := c.Query("month")
|
||||||
|
|
||||||
|
entries, err := h.store.ListHours(c.Request.Context(), assignmentID, month)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"hours": entries,
|
||||||
|
"total": len(entries),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetHoursSummary returns aggregated hour statistics for an assignment.
|
||||||
|
// GET /sdk/v1/dsb/assignments/:id/hours/summary?month=YYYY-MM
|
||||||
|
func (h *DSBHandlers) GetHoursSummary(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
month := c.Query("month")
|
||||||
|
|
||||||
|
summary, err := h.store.GetHoursSummary(c.Request.Context(), assignmentID, month)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, summary)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Tasks
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateTask creates a new task for an assignment.
|
||||||
|
// POST /sdk/v1/dsb/assignments/:id/tasks
|
||||||
|
func (h *DSBHandlers) CreateTask(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req dsb.CreateTaskRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
task := &dsb.Task{
|
||||||
|
AssignmentID: assignmentID,
|
||||||
|
Title: req.Title,
|
||||||
|
Description: req.Description,
|
||||||
|
Category: req.Category,
|
||||||
|
Priority: req.Priority,
|
||||||
|
DueDate: req.DueDate,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateTask(c.Request.Context(), task); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"task": task})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListTasks returns tasks for an assignment.
|
||||||
|
// GET /sdk/v1/dsb/assignments/:id/tasks?status=open
|
||||||
|
func (h *DSBHandlers) ListTasks(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
status := c.Query("status")
|
||||||
|
|
||||||
|
tasks, err := h.store.ListTasks(c.Request.Context(), assignmentID, status)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"tasks": tasks,
|
||||||
|
"total": len(tasks),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTask updates an existing task. Only the fields present
// (non-nil) in the request body are applied on top of the task's
// current values; the merged task is then written back via the store.
// PUT /sdk/v1/dsb/tasks/:taskId
//
// NOTE(review): this handler bypasses the store API and runs raw SQL
// through h.store.Pool() because the store has no GetTask(taskID)
// method. That is a layering violation — the query duplicates the
// dsb_tasks column list and will silently drift if the schema changes.
// Consider adding Store.GetTask and removing the inline SQL.
//
// NOTE(review): any Scan failure (including a genuine DB error, not
// just a missing row) is reported as 404 "task not found" — confirm
// whether 500 should be distinguished here, as other handlers do.
func (h *DSBHandlers) UpdateTask(c *gin.Context) {
	taskID, err := uuid.Parse(c.Param("taskId"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid task ID"})
		return
	}

	// We need to fetch the existing task first. Since tasks belong to assignments,
	// we query by task ID directly. For now, we do a lightweight approach: bind the
	// update request and apply changes via store.
	var req dsb.UpdateTaskRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Fetch current task by querying all tasks and filtering. Since we don't have
	// a GetTask(taskID) method, we build the task from partial data and update.
	// The store UpdateTask uses the task ID to locate the row.
	task := &dsb.Task{ID: taskID}

	// We need to get the current values to apply partial updates correctly.
	// Query the task directly.
	row := h.store.Pool().QueryRow(c.Request.Context(), `
		SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
		FROM dsb_tasks WHERE id = $1
	`, taskID)

	// Scan order must match the SELECT column list above exactly.
	if err := row.Scan(
		&task.ID, &task.AssignmentID, &task.Title, &task.Description,
		&task.Category, &task.Priority, &task.Status, &task.DueDate,
		&task.CompletedAt, &task.CreatedAt, &task.UpdatedAt,
	); err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "task not found"})
		return
	}

	// Apply non-nil fields
	if req.Title != nil {
		task.Title = *req.Title
	}
	if req.Description != nil {
		task.Description = *req.Description
	}
	if req.Category != nil {
		task.Category = *req.Category
	}
	if req.Priority != nil {
		task.Priority = *req.Priority
	}
	if req.Status != nil {
		task.Status = *req.Status
	}
	if req.DueDate != nil {
		task.DueDate = req.DueDate
	}

	if err := h.store.UpdateTask(c.Request.Context(), task); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, gin.H{"task": task})
}
|
||||||
|
|
||||||
|
// CompleteTask marks a task as completed.
|
||||||
|
// POST /sdk/v1/dsb/tasks/:taskId/complete
|
||||||
|
func (h *DSBHandlers) CompleteTask(c *gin.Context) {
|
||||||
|
taskID, err := uuid.Parse(c.Param("taskId"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid task ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CompleteTask(c.Request.Context(), taskID); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "task completed"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Communications
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateCommunication creates a new communication log entry.
|
||||||
|
// POST /sdk/v1/dsb/assignments/:id/communications
|
||||||
|
func (h *DSBHandlers) CreateCommunication(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req dsb.CreateCommunicationRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
comm := &dsb.Communication{
|
||||||
|
AssignmentID: assignmentID,
|
||||||
|
Direction: req.Direction,
|
||||||
|
Channel: req.Channel,
|
||||||
|
Subject: req.Subject,
|
||||||
|
Content: req.Content,
|
||||||
|
Participants: req.Participants,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateCommunication(c.Request.Context(), comm); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"communication": comm})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCommunications returns all communications for an assignment.
|
||||||
|
// GET /sdk/v1/dsb/assignments/:id/communications
|
||||||
|
func (h *DSBHandlers) ListCommunications(c *gin.Context) {
|
||||||
|
assignmentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid assignment ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
comms, err := h.store.ListCommunications(c.Request.Context(), assignmentID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"communications": comms,
|
||||||
|
"total": len(comms),
|
||||||
|
})
|
||||||
|
}
|
||||||
668
ai-compliance-sdk/internal/api/handlers/incidents_handlers.go
Normal file
668
ai-compliance-sdk/internal/api/handlers/incidents_handlers.go
Normal file
@@ -0,0 +1,668 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/incidents"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// IncidentHandlers handles incident/breach management HTTP requests,
// including DSGVO Art. 33 authority-notification deadline tracking.
type IncidentHandlers struct {
	store *incidents.Store // backing store for incidents and their measures
}
|
||||||
|
|
||||||
|
// NewIncidentHandlers creates new incident handlers
|
||||||
|
func NewIncidentHandlers(store *incidents.Store) *IncidentHandlers {
|
||||||
|
return &IncidentHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Incident CRUD
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateIncident creates a new incident
|
||||||
|
// POST /sdk/v1/incidents
|
||||||
|
func (h *IncidentHandlers) CreateIncident(c *gin.Context) {
|
||||||
|
var req incidents.CreateIncidentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
detectedAt := time.Now().UTC()
|
||||||
|
if req.DetectedAt != nil {
|
||||||
|
detectedAt = *req.DetectedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auto-calculate 72h deadline per DSGVO Art. 33
|
||||||
|
deadline := incidents.Calculate72hDeadline(detectedAt)
|
||||||
|
|
||||||
|
incident := &incidents.Incident{
|
||||||
|
TenantID: tenantID,
|
||||||
|
Title: req.Title,
|
||||||
|
Description: req.Description,
|
||||||
|
Category: req.Category,
|
||||||
|
Status: incidents.IncidentStatusDetected,
|
||||||
|
Severity: req.Severity,
|
||||||
|
DetectedAt: detectedAt,
|
||||||
|
ReportedBy: userID,
|
||||||
|
AffectedDataCategories: req.AffectedDataCategories,
|
||||||
|
AffectedDataSubjectCount: req.AffectedDataSubjectCount,
|
||||||
|
AffectedSystems: req.AffectedSystems,
|
||||||
|
AuthorityNotification: &incidents.AuthorityNotification{
|
||||||
|
Status: incidents.NotificationStatusPending,
|
||||||
|
Deadline: deadline,
|
||||||
|
},
|
||||||
|
DataSubjectNotification: &incidents.DataSubjectNotification{
|
||||||
|
Required: false,
|
||||||
|
Status: incidents.NotificationStatusNotRequired,
|
||||||
|
},
|
||||||
|
Timeline: []incidents.TimelineEntry{
|
||||||
|
{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "incident_created",
|
||||||
|
UserID: userID,
|
||||||
|
Details: "Incident detected and reported",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CreateIncident(c.Request.Context(), incident); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"incident": incident,
|
||||||
|
"authority_deadline": deadline,
|
||||||
|
"hours_until_deadline": time.Until(deadline).Hours(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIncident retrieves an incident by ID
|
||||||
|
// GET /sdk/v1/incidents/:id
|
||||||
|
func (h *IncidentHandlers) GetIncident(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
incident, err := h.store.GetIncident(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if incident == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get measures
|
||||||
|
measures, _ := h.store.ListMeasures(c.Request.Context(), id)
|
||||||
|
|
||||||
|
// Calculate deadline info if authority notification exists
|
||||||
|
var deadlineInfo gin.H
|
||||||
|
if incident.AuthorityNotification != nil {
|
||||||
|
hoursRemaining := time.Until(incident.AuthorityNotification.Deadline).Hours()
|
||||||
|
deadlineInfo = gin.H{
|
||||||
|
"deadline": incident.AuthorityNotification.Deadline,
|
||||||
|
"hours_remaining": hoursRemaining,
|
||||||
|
"overdue": hoursRemaining < 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"incident": incident,
|
||||||
|
"measures": measures,
|
||||||
|
"deadline_info": deadlineInfo,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListIncidents lists incidents for a tenant
|
||||||
|
// GET /sdk/v1/incidents
|
||||||
|
func (h *IncidentHandlers) ListIncidents(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
filters := &incidents.IncidentFilters{
|
||||||
|
Limit: 50,
|
||||||
|
}
|
||||||
|
|
||||||
|
if status := c.Query("status"); status != "" {
|
||||||
|
filters.Status = incidents.IncidentStatus(status)
|
||||||
|
}
|
||||||
|
if severity := c.Query("severity"); severity != "" {
|
||||||
|
filters.Severity = incidents.IncidentSeverity(severity)
|
||||||
|
}
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters.Category = incidents.IncidentCategory(category)
|
||||||
|
}
|
||||||
|
|
||||||
|
incidentList, total, err := h.store.ListIncidents(c.Request.Context(), tenantID, filters)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, incidents.IncidentListResponse{
|
||||||
|
Incidents: incidentList,
|
||||||
|
Total: total,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateIncident updates an incident
|
||||||
|
// PUT /sdk/v1/incidents/:id
|
||||||
|
func (h *IncidentHandlers) UpdateIncident(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
incident, err := h.store.GetIncident(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if incident == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req incidents.UpdateIncidentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if req.Title != "" {
|
||||||
|
incident.Title = req.Title
|
||||||
|
}
|
||||||
|
if req.Description != "" {
|
||||||
|
incident.Description = req.Description
|
||||||
|
}
|
||||||
|
if req.Category != "" {
|
||||||
|
incident.Category = req.Category
|
||||||
|
}
|
||||||
|
if req.Status != "" {
|
||||||
|
incident.Status = req.Status
|
||||||
|
}
|
||||||
|
if req.Severity != "" {
|
||||||
|
incident.Severity = req.Severity
|
||||||
|
}
|
||||||
|
if req.AffectedDataCategories != nil {
|
||||||
|
incident.AffectedDataCategories = req.AffectedDataCategories
|
||||||
|
}
|
||||||
|
if req.AffectedDataSubjectCount != nil {
|
||||||
|
incident.AffectedDataSubjectCount = *req.AffectedDataSubjectCount
|
||||||
|
}
|
||||||
|
if req.AffectedSystems != nil {
|
||||||
|
incident.AffectedSystems = req.AffectedSystems
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.UpdateIncident(c.Request.Context(), incident); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"incident": incident})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteIncident deletes an incident
|
||||||
|
// DELETE /sdk/v1/incidents/:id
|
||||||
|
func (h *IncidentHandlers) DeleteIncident(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.DeleteIncident(c.Request.Context(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "incident deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Risk Assessment
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AssessRisk performs a risk assessment for an incident.
//
// It derives a risk level from the submitted likelihood/impact pair,
// persists the assessment, advances the incident status to "assessment",
// and — when the derived level requires notification — flips the incident
// into the notification-required state.
// POST /sdk/v1/incidents/:id/risk-assessment
func (h *IncidentHandlers) AssessRisk(c *gin.Context) {
	id, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
		return
	}

	incident, err := h.store.GetIncident(c.Request.Context(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if incident == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
		return
	}

	var req incidents.RiskAssessmentRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	userID := rbac.GetUserID(c)

	// Auto-calculate risk level from likelihood x impact, then ask the
	// domain package whether that level mandates an authority notification.
	riskLevel := incidents.CalculateRiskLevel(req.Likelihood, req.Impact)
	notificationRequired := incidents.IsNotificationRequired(riskLevel)

	assessment := &incidents.RiskAssessment{
		Likelihood: req.Likelihood,
		Impact:     req.Impact,
		RiskLevel:  riskLevel,
		AssessedAt: time.Now().UTC(),
		AssessedBy: userID,
		Notes:      req.Notes,
	}

	if err := h.store.UpdateRiskAssessment(c.Request.Context(), id, assessment); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Update status to assessment.
	// NOTE(review): the UpdateIncident and AddTimelineEntry calls below
	// discard their errors — confirm best-effort is intended here.
	incident.Status = incidents.IncidentStatusAssessment
	h.store.UpdateIncident(c.Request.Context(), incident)

	// Add timeline entry recording the assessment result.
	h.store.AddTimelineEntry(c.Request.Context(), id, incidents.TimelineEntry{
		Timestamp: time.Now().UTC(),
		Action:    "risk_assessed",
		UserID:    userID,
		Details:   fmt.Sprintf("Risk level: %s (likelihood=%d, impact=%d)", riskLevel, req.Likelihood, req.Impact),
	})

	// If notification is required, update authority notification status
	// (only possible when the incident already carries a notification record).
	if notificationRequired && incident.AuthorityNotification != nil {
		incident.AuthorityNotification.Status = incidents.NotificationStatusPending
		h.store.UpdateAuthorityNotification(c.Request.Context(), id, incident.AuthorityNotification)

		// Update status to notification_required (overrides "assessment" set above).
		incident.Status = incidents.IncidentStatusNotificationRequired
		h.store.UpdateIncident(c.Request.Context(), incident)
	}

	c.JSON(http.StatusOK, gin.H{
		"risk_assessment":       assessment,
		"notification_required": notificationRequired,
		"incident_status":       incident.Status,
	})
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Authority Notification (Art. 33)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SubmitAuthorityNotification submits the supervisory authority notification
// (DSGVO Art. 33) for an incident: it marks the notification as sent,
// advances the incident status, records a timeline entry, and reports
// whether the submission landed inside the 72-hour window.
// POST /sdk/v1/incidents/:id/authority-notification
func (h *IncidentHandlers) SubmitAuthorityNotification(c *gin.Context) {
	id, err := uuid.Parse(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
		return
	}

	incident, err := h.store.GetIncident(c.Request.Context(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if incident == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
		return
	}

	var req incidents.SubmitAuthorityNotificationRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	userID := rbac.GetUserID(c)
	now := time.Now().UTC()

	// Preserve existing deadline; fall back to recomputing 72h from
	// detection time when no notification record exists yet.
	deadline := incidents.Calculate72hDeadline(incident.DetectedAt)
	if incident.AuthorityNotification != nil {
		deadline = incident.AuthorityNotification.Deadline
	}

	notification := &incidents.AuthorityNotification{
		Status:          incidents.NotificationStatusSent,
		Deadline:        deadline,
		SubmittedAt:     &now,
		AuthorityName:   req.AuthorityName,
		ReferenceNumber: req.ReferenceNumber,
		ContactPerson:   req.ContactPerson,
		Notes:           req.Notes,
	}

	if err := h.store.UpdateAuthorityNotification(c.Request.Context(), id, notification); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Update incident status.
	// NOTE(review): errors from UpdateIncident and AddTimelineEntry below
	// are silently discarded — confirm best-effort is intended.
	incident.Status = incidents.IncidentStatusNotificationSent
	h.store.UpdateIncident(c.Request.Context(), incident)

	// Add timeline entry documenting the submission.
	h.store.AddTimelineEntry(c.Request.Context(), id, incidents.TimelineEntry{
		Timestamp: now,
		Action:    "authority_notified",
		UserID:    userID,
		Details:   "Authority notification submitted to " + req.AuthorityName,
	})

	c.JSON(http.StatusOK, gin.H{
		"authority_notification": notification,
		"submitted_within_72h":   now.Before(deadline),
	})
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Data Subject Notification (Art. 34)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// NotifyDataSubjects submits the data subject notification
|
||||||
|
// POST /sdk/v1/incidents/:id/data-subject-notification
|
||||||
|
func (h *IncidentHandlers) NotifyDataSubjects(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
incident, err := h.store.GetIncident(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if incident == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req incidents.NotifyDataSubjectsRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
now := time.Now().UTC()
|
||||||
|
|
||||||
|
affectedCount := req.AffectedCount
|
||||||
|
if affectedCount == 0 {
|
||||||
|
affectedCount = incident.AffectedDataSubjectCount
|
||||||
|
}
|
||||||
|
|
||||||
|
notification := &incidents.DataSubjectNotification{
|
||||||
|
Required: true,
|
||||||
|
Status: incidents.NotificationStatusSent,
|
||||||
|
SentAt: &now,
|
||||||
|
AffectedCount: affectedCount,
|
||||||
|
NotificationText: req.NotificationText,
|
||||||
|
Channel: req.Channel,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.UpdateDataSubjectNotification(c.Request.Context(), id, notification); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add timeline entry
|
||||||
|
h.store.AddTimelineEntry(c.Request.Context(), id, incidents.TimelineEntry{
|
||||||
|
Timestamp: now,
|
||||||
|
Action: "data_subjects_notified",
|
||||||
|
UserID: userID,
|
||||||
|
Details: "Data subjects notified via " + req.Channel + " (" + fmt.Sprintf("%d", affectedCount) + " affected)",
|
||||||
|
})
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"data_subject_notification": notification,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Measures
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddMeasure adds a corrective measure to an incident
|
||||||
|
// POST /sdk/v1/incidents/:id/measures
|
||||||
|
func (h *IncidentHandlers) AddMeasure(c *gin.Context) {
|
||||||
|
incidentID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify incident exists
|
||||||
|
incident, err := h.store.GetIncident(c.Request.Context(), incidentID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if incident == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req incidents.AddMeasureRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
measure := &incidents.IncidentMeasure{
|
||||||
|
IncidentID: incidentID,
|
||||||
|
Title: req.Title,
|
||||||
|
Description: req.Description,
|
||||||
|
MeasureType: req.MeasureType,
|
||||||
|
Status: incidents.MeasureStatusPlanned,
|
||||||
|
Responsible: req.Responsible,
|
||||||
|
DueDate: req.DueDate,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.AddMeasure(c.Request.Context(), measure); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add timeline entry
|
||||||
|
h.store.AddTimelineEntry(c.Request.Context(), incidentID, incidents.TimelineEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "measure_added",
|
||||||
|
UserID: userID,
|
||||||
|
Details: "Measure added: " + req.Title + " (" + string(req.MeasureType) + ")",
|
||||||
|
})
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"measure": measure})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateMeasure updates a measure by replacing its stored fields with the
// values from the request body.
//
// NOTE(review): unlike UpdateIncident, this handler does not fetch the
// existing measure first — it builds a fresh IncidentMeasure from the
// request, so any field omitted from the JSON body is written back as its
// zero value. Confirm whether full-replace semantics are intended here,
// or whether a fetch-then-patch (as in UpdateIncident) is needed.
// PUT /sdk/v1/incidents/measures/:measureId
func (h *IncidentHandlers) UpdateMeasure(c *gin.Context) {
	measureID, err := uuid.Parse(c.Param("measureId"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid measure ID"})
		return
	}

	// Anonymous request struct; all fields are optional in the JSON body.
	var req struct {
		Title       string                  `json:"title,omitempty"`
		Description string                  `json:"description,omitempty"`
		MeasureType incidents.MeasureType   `json:"measure_type,omitempty"`
		Status      incidents.MeasureStatus `json:"status,omitempty"`
		Responsible string                  `json:"responsible,omitempty"`
		DueDate     *time.Time              `json:"due_date,omitempty"`
	}
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Build the replacement record keyed by the path's measure ID.
	measure := &incidents.IncidentMeasure{
		ID:          measureID,
		Title:       req.Title,
		Description: req.Description,
		MeasureType: req.MeasureType,
		Status:      req.Status,
		Responsible: req.Responsible,
		DueDate:     req.DueDate,
	}

	if err := h.store.UpdateMeasure(c.Request.Context(), measure); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, gin.H{"measure": measure})
}
|
||||||
|
|
||||||
|
// CompleteMeasure marks a measure as completed
|
||||||
|
// POST /sdk/v1/incidents/measures/:measureId/complete
|
||||||
|
func (h *IncidentHandlers) CompleteMeasure(c *gin.Context) {
|
||||||
|
measureID, err := uuid.Parse(c.Param("measureId"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid measure ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.CompleteMeasure(c.Request.Context(), measureID); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "measure completed"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Timeline
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddTimelineEntry adds a timeline entry to an incident
|
||||||
|
// POST /sdk/v1/incidents/:id/timeline
|
||||||
|
func (h *IncidentHandlers) AddTimelineEntry(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req incidents.AddTimelineEntryRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
entry := incidents.TimelineEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: req.Action,
|
||||||
|
UserID: userID,
|
||||||
|
Details: req.Details,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.AddTimelineEntry(c.Request.Context(), id, entry); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"timeline_entry": entry})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Close Incident
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CloseIncident closes an incident with root cause analysis
|
||||||
|
// POST /sdk/v1/incidents/:id/close
|
||||||
|
func (h *IncidentHandlers) CloseIncident(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid incident ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
incident, err := h.store.GetIncident(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if incident == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "incident not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req incidents.CloseIncidentRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
if err := h.store.CloseIncident(c.Request.Context(), id, req.RootCause, req.LessonsLearned); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add timeline entry
|
||||||
|
h.store.AddTimelineEntry(c.Request.Context(), id, incidents.TimelineEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "incident_closed",
|
||||||
|
UserID: userID,
|
||||||
|
Details: "Incident closed. Root cause: " + req.RootCause,
|
||||||
|
})
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"message": "incident closed",
|
||||||
|
"root_cause": req.RootCause,
|
||||||
|
"lessons_learned": req.LessonsLearned,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns aggregated incident statistics
|
||||||
|
// GET /sdk/v1/incidents/statistics
|
||||||
|
func (h *IncidentHandlers) GetStatistics(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
stats, err := h.store.GetStatistics(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, stats)
|
||||||
|
}
|
||||||
|
|
||||||
115
ai-compliance-sdk/internal/api/handlers/industry_handlers.go
Normal file
115
ai-compliance-sdk/internal/api/handlers/industry_handlers.go
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/industry"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// IndustryHandlers handles industry-specific compliance template requests.
// All data is static (embedded Go structs), so no store/database is needed
// and the zero value is fully usable.
type IndustryHandlers struct{}
|
||||||
|
|
||||||
|
// NewIndustryHandlers creates new industry handlers
|
||||||
|
func NewIndustryHandlers() *IndustryHandlers {
|
||||||
|
return &IndustryHandlers{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Industry Template Endpoints
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ListIndustries returns a summary list of all available industry templates.
|
||||||
|
// GET /sdk/v1/industries
|
||||||
|
func (h *IndustryHandlers) ListIndustries(c *gin.Context) {
|
||||||
|
templates := industry.GetAllTemplates()
|
||||||
|
|
||||||
|
summaries := make([]industry.IndustrySummary, 0, len(templates))
|
||||||
|
for _, t := range templates {
|
||||||
|
summaries = append(summaries, industry.IndustrySummary{
|
||||||
|
Slug: t.Slug,
|
||||||
|
Name: t.Name,
|
||||||
|
Description: t.Description,
|
||||||
|
Icon: t.Icon,
|
||||||
|
RegulationCount: len(t.Regulations),
|
||||||
|
TemplateCount: len(t.VVTTemplates),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, industry.IndustryListResponse{
|
||||||
|
Industries: summaries,
|
||||||
|
Total: len(summaries),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIndustry returns the full industry template for a given slug.
|
||||||
|
// GET /sdk/v1/industries/:slug
|
||||||
|
func (h *IndustryHandlers) GetIndustry(c *gin.Context) {
|
||||||
|
slug := c.Param("slug")
|
||||||
|
|
||||||
|
tmpl := industry.GetTemplateBySlug(slug)
|
||||||
|
if tmpl == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": slug})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, tmpl)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetVVTTemplates returns only the VVT templates for a given industry.
|
||||||
|
// GET /sdk/v1/industries/:slug/vvt-templates
|
||||||
|
func (h *IndustryHandlers) GetVVTTemplates(c *gin.Context) {
|
||||||
|
slug := c.Param("slug")
|
||||||
|
|
||||||
|
tmpl := industry.GetTemplateBySlug(slug)
|
||||||
|
if tmpl == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": slug})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"slug": tmpl.Slug,
|
||||||
|
"industry": tmpl.Name,
|
||||||
|
"vvt_templates": tmpl.VVTTemplates,
|
||||||
|
"total": len(tmpl.VVTTemplates),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTOMRecommendations returns only the TOM recommendations for a given industry.
|
||||||
|
// GET /sdk/v1/industries/:slug/tom-recommendations
|
||||||
|
func (h *IndustryHandlers) GetTOMRecommendations(c *gin.Context) {
|
||||||
|
slug := c.Param("slug")
|
||||||
|
|
||||||
|
tmpl := industry.GetTemplateBySlug(slug)
|
||||||
|
if tmpl == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": slug})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"slug": tmpl.Slug,
|
||||||
|
"industry": tmpl.Name,
|
||||||
|
"tom_recommendations": tmpl.TOMRecommendations,
|
||||||
|
"total": len(tmpl.TOMRecommendations),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRiskScenarios returns only the risk scenarios for a given industry.
|
||||||
|
// GET /sdk/v1/industries/:slug/risk-scenarios
|
||||||
|
func (h *IndustryHandlers) GetRiskScenarios(c *gin.Context) {
|
||||||
|
slug := c.Param("slug")
|
||||||
|
|
||||||
|
tmpl := industry.GetTemplateBySlug(slug)
|
||||||
|
if tmpl == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "industry template not found", "slug": slug})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"slug": tmpl.Slug,
|
||||||
|
"industry": tmpl.Name,
|
||||||
|
"risk_scenarios": tmpl.RiskScenarios,
|
||||||
|
"total": len(tmpl.RiskScenarios),
|
||||||
|
})
|
||||||
|
}
|
||||||
268
ai-compliance-sdk/internal/api/handlers/multitenant_handlers.go
Normal file
268
ai-compliance-sdk/internal/api/handlers/multitenant_handlers.go
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/multitenant"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MultiTenantHandlers handles multi-tenant administration endpoints.
type MultiTenantHandlers struct {
	// store provides tenant overview / detail aggregation queries.
	store *multitenant.Store
	// rbacStore is used for tenant and namespace CRUD operations.
	rbacStore *rbac.Store
}
|
||||||
|
|
||||||
|
// NewMultiTenantHandlers creates new multi-tenant handlers.
|
||||||
|
func NewMultiTenantHandlers(store *multitenant.Store, rbacStore *rbac.Store) *MultiTenantHandlers {
|
||||||
|
return &MultiTenantHandlers{
|
||||||
|
store: store,
|
||||||
|
rbacStore: rbacStore,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOverview returns all tenants with compliance scores and module highlights.
|
||||||
|
// GET /sdk/v1/multi-tenant/overview
|
||||||
|
func (h *MultiTenantHandlers) GetOverview(c *gin.Context) {
|
||||||
|
overview, err := h.store.GetOverview(c.Request.Context())
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, overview)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTenantDetail returns detailed compliance info for one tenant.
|
||||||
|
// GET /sdk/v1/multi-tenant/tenants/:id
|
||||||
|
func (h *MultiTenantHandlers) GetTenantDetail(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
detail, err := h.store.GetTenantDetail(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, detail)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateTenant creates a new tenant with default setup.
// It creates the tenant via the RBAC store and then creates a default
// "main" namespace. If the namespace creation fails after the tenant was
// created, the response is still 201 Created but carries a "warning"
// field instead of the namespace.
// POST /sdk/v1/multi-tenant/tenants
func (h *MultiTenantHandlers) CreateTenant(c *gin.Context) {
	var req multitenant.CreateTenantRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Build the tenant from the request.
	tenant := &rbac.Tenant{
		Name:            req.Name,
		Slug:            req.Slug,
		MaxUsers:        req.MaxUsers,
		LLMQuotaMonthly: req.LLMQuotaMonthly,
	}

	// Create tenant via RBAC store (assigns ID, timestamps, defaults).
	if err := h.rbacStore.CreateTenant(c.Request.Context(), tenant); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Create default "main" namespace for the new tenant.
	defaultNamespace := &rbac.Namespace{
		TenantID: tenant.ID,
		Name:     "Main",
		Slug:     "main",
	}
	if err := h.rbacStore.CreateNamespace(c.Request.Context(), defaultNamespace); err != nil {
		// Tenant was created successfully but namespace creation failed.
		// Log and continue -- the tenant is still usable.
		// NOTE(review): despite the comment, nothing is actually logged
		// here; the failure is only surfaced in the response body.
		c.JSON(http.StatusCreated, gin.H{
			"tenant":  tenant,
			"warning": "tenant created but default namespace creation failed: " + err.Error(),
		})
		return
	}

	c.JSON(http.StatusCreated, gin.H{
		"tenant":    tenant,
		"namespace": defaultNamespace,
	})
}
|
||||||
|
|
||||||
|
// UpdateTenant performs a partial update of tenant settings.
|
||||||
|
// Only non-nil fields in the request body are applied.
|
||||||
|
// PUT /sdk/v1/multi-tenant/tenants/:id
|
||||||
|
func (h *MultiTenantHandlers) UpdateTenant(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req multitenant.UpdateTenantRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch the existing tenant so we can apply partial updates
|
||||||
|
tenant, err := h.rbacStore.GetTenant(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply only the fields that were provided
|
||||||
|
if req.Name != nil {
|
||||||
|
tenant.Name = *req.Name
|
||||||
|
}
|
||||||
|
if req.MaxUsers != nil {
|
||||||
|
tenant.MaxUsers = *req.MaxUsers
|
||||||
|
}
|
||||||
|
if req.LLMQuotaMonthly != nil {
|
||||||
|
tenant.LLMQuotaMonthly = *req.LLMQuotaMonthly
|
||||||
|
}
|
||||||
|
if req.Status != nil {
|
||||||
|
tenant.Status = rbac.TenantStatus(*req.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.rbacStore.UpdateTenant(c.Request.Context(), tenant); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, tenant)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListNamespaces returns all namespaces for a specific tenant.
|
||||||
|
// GET /sdk/v1/multi-tenant/tenants/:id/namespaces
|
||||||
|
func (h *MultiTenantHandlers) ListNamespaces(c *gin.Context) {
|
||||||
|
tenantID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
namespaces, err := h.rbacStore.ListNamespaces(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"namespaces": namespaces,
|
||||||
|
"total": len(namespaces),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateNamespace creates a new namespace within a tenant.
|
||||||
|
// POST /sdk/v1/multi-tenant/tenants/:id/namespaces
|
||||||
|
func (h *MultiTenantHandlers) CreateNamespace(c *gin.Context) {
|
||||||
|
tenantID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the tenant exists
|
||||||
|
_, err = h.rbacStore.GetTenant(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req multitenant.CreateNamespaceRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace := &rbac.Namespace{
|
||||||
|
TenantID: tenantID,
|
||||||
|
Name: req.Name,
|
||||||
|
Slug: req.Slug,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply optional fields if provided
|
||||||
|
if req.IsolationLevel != "" {
|
||||||
|
namespace.IsolationLevel = rbac.IsolationLevel(req.IsolationLevel)
|
||||||
|
}
|
||||||
|
if req.DataClassification != "" {
|
||||||
|
namespace.DataClassification = rbac.DataClassification(req.DataClassification)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.rbacStore.CreateNamespace(c.Request.Context(), namespace); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, namespace)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SwitchTenant returns the tenant info needed for the frontend to switch context.
|
||||||
|
// The caller provides a tenant_id and receives back the tenant details needed
|
||||||
|
// to update the frontend's active tenant state.
|
||||||
|
// POST /sdk/v1/multi-tenant/switch
|
||||||
|
func (h *MultiTenantHandlers) SwitchTenant(c *gin.Context) {
|
||||||
|
var req multitenant.SwitchTenantRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID, err := uuid.Parse(req.TenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenant, err := h.rbacStore.GetTenant(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "tenant not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the tenant is active
|
||||||
|
if tenant.Status != rbac.TenantStatusActive {
|
||||||
|
c.JSON(http.StatusForbidden, gin.H{
|
||||||
|
"error": "tenant not active",
|
||||||
|
"status": string(tenant.Status),
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get namespaces for the tenant so the frontend can populate namespace selectors
|
||||||
|
namespaces, err := h.rbacStore.ListNamespaces(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
// Non-fatal: return tenant info without namespaces
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"tenant": multitenant.SwitchTenantResponse{
|
||||||
|
TenantID: tenant.ID,
|
||||||
|
TenantName: tenant.Name,
|
||||||
|
TenantSlug: tenant.Slug,
|
||||||
|
Status: string(tenant.Status),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"tenant": multitenant.SwitchTenantResponse{
|
||||||
|
TenantID: tenant.ID,
|
||||||
|
TenantName: tenant.Name,
|
||||||
|
TenantSlug: tenant.Slug,
|
||||||
|
Status: string(tenant.Status),
|
||||||
|
},
|
||||||
|
"namespaces": namespaces,
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -0,0 +1,80 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ReportingHandlers struct {
|
||||||
|
store *reporting.Store
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewReportingHandlers(store *reporting.Store) *ReportingHandlers {
|
||||||
|
return &ReportingHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetExecutiveReport generates a comprehensive compliance report
|
||||||
|
// GET /sdk/v1/reporting/executive
|
||||||
|
func (h *ReportingHandlers) GetExecutiveReport(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
report, err := h.store.GenerateReport(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, report)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetComplianceScore returns just the overall compliance score
|
||||||
|
// GET /sdk/v1/reporting/score
|
||||||
|
func (h *ReportingHandlers) GetComplianceScore(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
report, err := h.store.GenerateReport(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"compliance_score": report.ComplianceScore,
|
||||||
|
"risk_level": report.RiskOverview.OverallLevel,
|
||||||
|
"generated_at": report.GeneratedAt,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUpcomingDeadlines returns deadlines across all modules
|
||||||
|
// GET /sdk/v1/reporting/deadlines
|
||||||
|
func (h *ReportingHandlers) GetUpcomingDeadlines(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
report, err := h.store.GenerateReport(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"deadlines": report.UpcomingDeadlines,
|
||||||
|
"total": len(report.UpcomingDeadlines),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRiskOverview returns the aggregated risk assessment
|
||||||
|
// GET /sdk/v1/reporting/risks
|
||||||
|
func (h *ReportingHandlers) GetRiskOverview(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
report, err := h.store.GenerateReport(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, report.RiskOverview)
|
||||||
|
}
|
||||||
631
ai-compliance-sdk/internal/api/handlers/sso_handlers.go
Normal file
631
ai-compliance-sdk/internal/api/handlers/sso_handlers.go
Normal file
@@ -0,0 +1,631 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/sso"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/golang-jwt/jwt/v5"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SSOHandlers handles SSO-related HTTP requests.
|
||||||
|
type SSOHandlers struct {
|
||||||
|
store *sso.Store
|
||||||
|
jwtSecret string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSSOHandlers creates new SSO handlers.
|
||||||
|
func NewSSOHandlers(store *sso.Store, jwtSecret string) *SSOHandlers {
|
||||||
|
return &SSOHandlers{store: store, jwtSecret: jwtSecret}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// SSO Configuration CRUD
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateConfig creates a new SSO configuration for the tenant.
|
||||||
|
// POST /sdk/v1/sso/configs
|
||||||
|
func (h *SSOHandlers) CreateConfig(c *gin.Context) {
|
||||||
|
var req sso.CreateSSOConfigRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
cfg, err := h.store.CreateConfig(c.Request.Context(), tenantID, &req)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"config": cfg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListConfigs lists all SSO configurations for the tenant.
|
||||||
|
// GET /sdk/v1/sso/configs
|
||||||
|
func (h *SSOHandlers) ListConfigs(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
configs, err := h.store.ListConfigs(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"configs": configs,
|
||||||
|
"total": len(configs),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetConfig retrieves an SSO configuration by ID.
|
||||||
|
// GET /sdk/v1/sso/configs/:id
|
||||||
|
func (h *SSOHandlers) GetConfig(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
configID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, err := h.store.GetConfig(c.Request.Context(), tenantID, configID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if cfg == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "sso configuration not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"config": cfg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateConfig updates an SSO configuration.
|
||||||
|
// PUT /sdk/v1/sso/configs/:id
|
||||||
|
func (h *SSOHandlers) UpdateConfig(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
configID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req sso.UpdateSSOConfigRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, err := h.store.UpdateConfig(c.Request.Context(), tenantID, configID, &req)
|
||||||
|
if err != nil {
|
||||||
|
if err.Error() == "sso configuration not found" {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"config": cfg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteConfig deletes an SSO configuration.
|
||||||
|
// DELETE /sdk/v1/sso/configs/:id
|
||||||
|
func (h *SSOHandlers) DeleteConfig(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
configID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid config ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.DeleteConfig(c.Request.Context(), tenantID, configID); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "sso configuration deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// SSO Users
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ListUsers lists all SSO-provisioned users for the tenant.
|
||||||
|
// GET /sdk/v1/sso/users
|
||||||
|
func (h *SSOHandlers) ListUsers(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
users, err := h.store.ListUsers(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"users": users,
|
||||||
|
"total": len(users),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// OIDC Flow
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// InitiateOIDCLogin initiates the OIDC authorization code flow.
|
||||||
|
// It looks up the enabled SSO config for the tenant, builds the authorization
|
||||||
|
// URL, sets a state cookie, and redirects the user to the IdP.
|
||||||
|
// GET /sdk/v1/sso/oidc/login
|
||||||
|
func (h *SSOHandlers) InitiateOIDCLogin(c *gin.Context) {
|
||||||
|
// Resolve tenant ID from query param
|
||||||
|
tenantIDStr := c.Query("tenant_id")
|
||||||
|
if tenantIDStr == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "tenant_id query parameter is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID, err := uuid.Parse(tenantIDStr)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant_id"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look up the enabled SSO config
|
||||||
|
cfg, err := h.store.GetEnabledConfig(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if cfg == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "no enabled SSO configuration found for this tenant"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if cfg.ProviderType != sso.ProviderTypeOIDC {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "SSO configuration is not OIDC"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Discover the authorization endpoint
|
||||||
|
discoveryURL := strings.TrimSuffix(cfg.OIDCIssuerURL, "/") + "/.well-known/openid-configuration"
|
||||||
|
authEndpoint, _, _, err := discoverOIDCEndpoints(discoveryURL)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("OIDC discovery failed: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate state parameter (random bytes + tenant_id for correlation)
|
||||||
|
stateBytes := make([]byte, 32)
|
||||||
|
if _, err := rand.Read(stateBytes); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate state"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
state := base64.URLEncoding.EncodeToString(stateBytes) + "." + tenantID.String()
|
||||||
|
|
||||||
|
// Generate nonce
|
||||||
|
nonceBytes := make([]byte, 16)
|
||||||
|
if _, err := rand.Read(nonceBytes); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate nonce"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
nonce := base64.URLEncoding.EncodeToString(nonceBytes)
|
||||||
|
|
||||||
|
// Build authorization URL
|
||||||
|
scopes := cfg.OIDCScopes
|
||||||
|
if len(scopes) == 0 {
|
||||||
|
scopes = []string{"openid", "profile", "email"}
|
||||||
|
}
|
||||||
|
|
||||||
|
params := url.Values{
|
||||||
|
"client_id": {cfg.OIDCClientID},
|
||||||
|
"redirect_uri": {cfg.OIDCRedirectURI},
|
||||||
|
"response_type": {"code"},
|
||||||
|
"scope": {strings.Join(scopes, " ")},
|
||||||
|
"state": {state},
|
||||||
|
"nonce": {nonce},
|
||||||
|
}
|
||||||
|
|
||||||
|
authURL := authEndpoint + "?" + params.Encode()
|
||||||
|
|
||||||
|
// Set state cookie for CSRF protection (HttpOnly, 10 min expiry)
|
||||||
|
c.SetCookie("sso_state", state, 600, "/", "", true, true)
|
||||||
|
c.SetCookie("sso_nonce", nonce, 600, "/", "", true, true)
|
||||||
|
|
||||||
|
c.Redirect(http.StatusFound, authURL)
|
||||||
|
}
|
||||||
|
|
||||||
|
// HandleOIDCCallback handles the OIDC authorization code callback from the IdP.
|
||||||
|
// It validates the state, exchanges the code for tokens, extracts user info,
|
||||||
|
// performs JIT user provisioning, and issues a JWT.
|
||||||
|
// GET /sdk/v1/sso/oidc/callback
|
||||||
|
func (h *SSOHandlers) HandleOIDCCallback(c *gin.Context) {
|
||||||
|
// Check for errors from the IdP
|
||||||
|
if errParam := c.Query("error"); errParam != "" {
|
||||||
|
errDesc := c.Query("error_description")
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{
|
||||||
|
"error": errParam,
|
||||||
|
"description": errDesc,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
code := c.Query("code")
|
||||||
|
stateParam := c.Query("state")
|
||||||
|
if code == "" || stateParam == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "missing code or state parameter"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate state cookie
|
||||||
|
stateCookie, err := c.Cookie("sso_state")
|
||||||
|
if err != nil || stateCookie != stateParam {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid state parameter (CSRF check failed)"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract tenant ID from state
|
||||||
|
parts := strings.SplitN(stateParam, ".", 2)
|
||||||
|
if len(parts) != 2 {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "malformed state parameter"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tenantID, err := uuid.Parse(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant_id in state"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look up the enabled SSO config
|
||||||
|
cfg, err := h.store.GetEnabledConfig(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if cfg == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "no enabled SSO configuration found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Discover OIDC endpoints
|
||||||
|
discoveryURL := strings.TrimSuffix(cfg.OIDCIssuerURL, "/") + "/.well-known/openid-configuration"
|
||||||
|
_, tokenEndpoint, userInfoEndpoint, err := discoverOIDCEndpoints(discoveryURL)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("OIDC discovery failed: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exchange authorization code for tokens
|
||||||
|
tokenResp, err := exchangeCodeForTokens(tokenEndpoint, code, cfg.OIDCClientID, cfg.OIDCClientSecret, cfg.OIDCRedirectURI)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("token exchange failed: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract user claims from ID token or UserInfo endpoint
|
||||||
|
claims, err := extractUserClaims(tokenResp, userInfoEndpoint)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to extract user claims: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
sub := getStringClaim(claims, "sub")
|
||||||
|
email := getStringClaim(claims, "email")
|
||||||
|
name := getStringClaim(claims, "name")
|
||||||
|
groups := getStringSliceClaim(claims, "groups")
|
||||||
|
|
||||||
|
if sub == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "ID token missing 'sub' claim"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if email == "" {
|
||||||
|
email = sub
|
||||||
|
}
|
||||||
|
if name == "" {
|
||||||
|
name = email
|
||||||
|
}
|
||||||
|
|
||||||
|
// JIT provision the user
|
||||||
|
user, err := h.store.UpsertUser(c.Request.Context(), tenantID, cfg.ID, sub, email, name, groups)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("user provisioning failed: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine roles from role mapping
|
||||||
|
roles := resolveRoles(cfg, groups)
|
||||||
|
|
||||||
|
// Generate JWT
|
||||||
|
ssoClaims := sso.SSOClaims{
|
||||||
|
UserID: user.ID,
|
||||||
|
TenantID: tenantID,
|
||||||
|
Email: user.Email,
|
||||||
|
DisplayName: user.DisplayName,
|
||||||
|
Roles: roles,
|
||||||
|
SSOConfigID: cfg.ID,
|
||||||
|
}
|
||||||
|
|
||||||
|
jwtToken, err := h.generateJWT(ssoClaims)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("JWT generation failed: %v", err)})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear state cookies
|
||||||
|
c.SetCookie("sso_state", "", -1, "/", "", true, true)
|
||||||
|
c.SetCookie("sso_nonce", "", -1, "/", "", true, true)
|
||||||
|
|
||||||
|
// Return JWT as JSON (the frontend can also handle redirect)
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"token": jwtToken,
|
||||||
|
"user": user,
|
||||||
|
"roles": roles,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// JWT Generation
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// generateJWT creates a signed JWT token containing the SSO claims.
|
||||||
|
func (h *SSOHandlers) generateJWT(claims sso.SSOClaims) (string, error) {
|
||||||
|
now := time.Now().UTC()
|
||||||
|
expiry := now.Add(24 * time.Hour)
|
||||||
|
|
||||||
|
token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
|
||||||
|
"user_id": claims.UserID.String(),
|
||||||
|
"tenant_id": claims.TenantID.String(),
|
||||||
|
"email": claims.Email,
|
||||||
|
"display_name": claims.DisplayName,
|
||||||
|
"roles": claims.Roles,
|
||||||
|
"sso_config_id": claims.SSOConfigID.String(),
|
||||||
|
"iss": "ai-compliance-sdk",
|
||||||
|
"iat": now.Unix(),
|
||||||
|
"exp": expiry.Unix(),
|
||||||
|
})
|
||||||
|
|
||||||
|
tokenString, err := token.SignedString([]byte(h.jwtSecret))
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to sign JWT: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tokenString, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// OIDC Discovery & Token Exchange (manual HTTP, no external OIDC library)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// oidcDiscoveryResponse holds the relevant fields from the OIDC discovery document.
|
||||||
|
type oidcDiscoveryResponse struct {
|
||||||
|
AuthorizationEndpoint string `json:"authorization_endpoint"`
|
||||||
|
TokenEndpoint string `json:"token_endpoint"`
|
||||||
|
UserinfoEndpoint string `json:"userinfo_endpoint"`
|
||||||
|
JwksURI string `json:"jwks_uri"`
|
||||||
|
Issuer string `json:"issuer"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// discoverOIDCEndpoints fetches the OIDC discovery document and returns
|
||||||
|
// the authorization, token, and userinfo endpoints.
|
||||||
|
func discoverOIDCEndpoints(discoveryURL string) (authEndpoint, tokenEndpoint, userInfoEndpoint string, err error) {
|
||||||
|
client := &http.Client{Timeout: 10 * time.Second}
|
||||||
|
|
||||||
|
resp, err := client.Get(discoveryURL)
|
||||||
|
if err != nil {
|
||||||
|
return "", "", "", fmt.Errorf("failed to fetch discovery document: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
body, _ := io.ReadAll(resp.Body)
|
||||||
|
return "", "", "", fmt.Errorf("discovery endpoint returned %d: %s", resp.StatusCode, string(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
var discovery oidcDiscoveryResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&discovery); err != nil {
|
||||||
|
return "", "", "", fmt.Errorf("failed to decode discovery document: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if discovery.AuthorizationEndpoint == "" {
|
||||||
|
return "", "", "", fmt.Errorf("discovery document missing authorization_endpoint")
|
||||||
|
}
|
||||||
|
if discovery.TokenEndpoint == "" {
|
||||||
|
return "", "", "", fmt.Errorf("discovery document missing token_endpoint")
|
||||||
|
}
|
||||||
|
|
||||||
|
return discovery.AuthorizationEndpoint, discovery.TokenEndpoint, discovery.UserinfoEndpoint, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// oidcTokenResponse holds the response from the OIDC token endpoint.
|
||||||
|
type oidcTokenResponse struct {
|
||||||
|
AccessToken string `json:"access_token"`
|
||||||
|
IDToken string `json:"id_token"`
|
||||||
|
TokenType string `json:"token_type"`
|
||||||
|
ExpiresIn int `json:"expires_in"`
|
||||||
|
RefreshToken string `json:"refresh_token,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// exchangeCodeForTokens exchanges an authorization code for tokens at the token endpoint.
|
||||||
|
func exchangeCodeForTokens(tokenEndpoint, code, clientID, clientSecret, redirectURI string) (*oidcTokenResponse, error) {
|
||||||
|
client := &http.Client{Timeout: 10 * time.Second}
|
||||||
|
|
||||||
|
data := url.Values{
|
||||||
|
"grant_type": {"authorization_code"},
|
||||||
|
"code": {code},
|
||||||
|
"client_id": {clientID},
|
||||||
|
"redirect_uri": {redirectURI},
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequest("POST", tokenEndpoint, strings.NewReader(data.Encode()))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create token request: %w", err)
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||||
|
|
||||||
|
// Use client_secret_basic if provided
|
||||||
|
if clientSecret != "" {
|
||||||
|
req.SetBasicAuth(clientID, clientSecret)
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("token request failed: %w", err)
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
body, _ := io.ReadAll(resp.Body)
|
||||||
|
return nil, fmt.Errorf("token endpoint returned %d: %s", resp.StatusCode, string(body))
|
||||||
|
}
|
||||||
|
|
||||||
|
var tokenResp oidcTokenResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to decode token response: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &tokenResp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractUserClaims extracts user claims from the ID token payload.
|
||||||
|
// If the ID token is unavailable or incomplete, it falls back to the UserInfo endpoint.
|
||||||
|
func extractUserClaims(tokenResp *oidcTokenResponse, userInfoEndpoint string) (map[string]interface{}, error) {
|
||||||
|
claims := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Try to decode ID token payload (without signature verification for claims extraction;
|
||||||
|
// in production, you should verify the signature using the JWKS endpoint)
|
||||||
|
if tokenResp.IDToken != "" {
|
||||||
|
parts := strings.Split(tokenResp.IDToken, ".")
|
||||||
|
if len(parts) == 3 {
|
||||||
|
payload, err := base64.RawURLEncoding.DecodeString(parts[1])
|
||||||
|
if err == nil {
|
||||||
|
if err := json.Unmarshal(payload, &claims); err == nil && claims["sub"] != nil {
|
||||||
|
return claims, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback to UserInfo endpoint
|
||||||
|
if userInfoEndpoint != "" && tokenResp.AccessToken != "" {
|
||||||
|
userClaims, err := fetchUserInfo(userInfoEndpoint, tokenResp.AccessToken)
|
||||||
|
if err == nil && userClaims["sub"] != nil {
|
||||||
|
return userClaims, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if claims["sub"] != nil {
|
||||||
|
return claims, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("could not extract user claims from ID token or UserInfo endpoint")
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchUserInfo calls the OIDC UserInfo endpoint with the access token and
// returns the decoded claims map.
func fetchUserInfo(userInfoEndpoint, accessToken string) (map[string]interface{}, error) {
	req, err := http.NewRequest("GET", userInfoEndpoint, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+accessToken)

	httpClient := &http.Client{Timeout: 10 * time.Second}
	resp, err := httpClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("userinfo endpoint returned %d", resp.StatusCode)
	}

	var claims map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&claims); err != nil {
		return nil, err
	}
	return claims, nil
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Claim Extraction Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// getStringClaim extracts a string claim from a claims map.
// It returns the empty string when the key is absent or the value
// is not a string.
func getStringClaim(claims map[string]interface{}, key string) string {
	value, present := claims[key]
	if !present {
		return ""
	}
	// Type assertion with default: non-string values yield "".
	str, _ := value.(string)
	return str
}
|
||||||
|
|
||||||
|
// getStringSliceClaim extracts a string slice claim from a claims map.
// It accepts either a native []string or a JSON-decoded []interface{}
// (keeping only the string elements). Missing keys or any other value
// type yield nil.
func getStringSliceClaim(claims map[string]interface{}, key string) []string {
	raw, present := claims[key]
	if !present {
		return nil
	}

	// Already a string slice — return it directly.
	if native, ok := raw.([]string); ok {
		return native
	}

	// JSON decoding produces []interface{}; filter to string elements.
	items, ok := raw.([]interface{})
	if !ok {
		return nil
	}
	out := make([]string, 0, len(items))
	for _, item := range items {
		if s, ok := item.(string); ok {
			out = append(out, s)
		}
	}
	return out
}
|
||||||
|
|
||||||
|
// resolveRoles maps SSO groups to internal roles using the config's role mapping.
|
||||||
|
// If no groups match, the default role is returned.
|
||||||
|
func resolveRoles(cfg *sso.SSOConfig, groups []string) []string {
|
||||||
|
if cfg.RoleMapping == nil || len(cfg.RoleMapping) == 0 {
|
||||||
|
if cfg.DefaultRoleID != nil {
|
||||||
|
return []string{cfg.DefaultRoleID.String()}
|
||||||
|
}
|
||||||
|
return []string{"compliance_user"}
|
||||||
|
}
|
||||||
|
|
||||||
|
roleSet := make(map[string]bool)
|
||||||
|
for _, group := range groups {
|
||||||
|
if role, ok := cfg.RoleMapping[group]; ok {
|
||||||
|
roleSet[role] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(roleSet) == 0 {
|
||||||
|
if cfg.DefaultRoleID != nil {
|
||||||
|
return []string{cfg.DefaultRoleID.String()}
|
||||||
|
}
|
||||||
|
return []string{"compliance_user"}
|
||||||
|
}
|
||||||
|
|
||||||
|
roles := make([]string, 0, len(roleSet))
|
||||||
|
for role := range roleSet {
|
||||||
|
roles = append(roles, role)
|
||||||
|
}
|
||||||
|
return roles
|
||||||
|
}
|
||||||
850
ai-compliance-sdk/internal/api/handlers/vendor_handlers.go
Normal file
850
ai-compliance-sdk/internal/api/handlers/vendor_handlers.go
Normal file
@@ -0,0 +1,850 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/vendor"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// VendorHandlers handles vendor-compliance HTTP requests: vendor CRUD,
// contracts, findings, control instances, and templates.
type VendorHandlers struct {
	// store provides tenant-scoped persistence for all vendor-compliance
	// entities; it is the handlers' only dependency.
	store *vendor.Store
}
|
||||||
|
|
||||||
|
// NewVendorHandlers creates new vendor handlers
|
||||||
|
func NewVendorHandlers(store *vendor.Store) *VendorHandlers {
|
||||||
|
return &VendorHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Vendor CRUD
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateVendor creates a new vendor
// POST /sdk/v1/vendors
//
// The request body is bound into vendor.CreateVendorRequest; the owning
// tenant and the creating user come from the RBAC context, never from the
// payload. New vendors always start in ACTIVE status.
func (h *VendorHandlers) CreateVendor(c *gin.Context) {
	var req vendor.CreateVendorRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	tenantID := rbac.GetTenantID(c)
	userID := rbac.GetUserID(c)

	// Copy the request fields verbatim into a new Vendor record.
	v := &vendor.Vendor{
		TenantID:              tenantID,
		Name:                  req.Name,
		LegalForm:             req.LegalForm,
		Country:               req.Country,
		Address:               req.Address,
		Website:               req.Website,
		ContactName:           req.ContactName,
		ContactEmail:          req.ContactEmail,
		ContactPhone:          req.ContactPhone,
		ContactDepartment:     req.ContactDepartment,
		Role:                  req.Role,
		ServiceCategory:       req.ServiceCategory,
		ServiceDescription:    req.ServiceDescription,
		DataAccessLevel:       req.DataAccessLevel,
		ProcessingLocations:   req.ProcessingLocations,
		Certifications:        req.Certifications,
		ReviewFrequency:       req.ReviewFrequency,
		ProcessingActivityIDs: req.ProcessingActivityIDs,
		TemplateID:            req.TemplateID,
		Status:                vendor.VendorStatusActive,
		CreatedBy:             userID.String(),
	}

	if err := h.store.CreateVendor(c.Request.Context(), v); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusCreated, gin.H{"vendor": v})
}
|
||||||
|
|
||||||
|
// ListVendors lists all vendors for a tenant
|
||||||
|
// GET /sdk/v1/vendors
|
||||||
|
func (h *VendorHandlers) ListVendors(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
vendors, err := h.store.ListVendors(c.Request.Context(), tenantID.String())
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"vendors": vendors,
|
||||||
|
"total": len(vendors),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetVendor retrieves a vendor by ID with contracts and findings
// GET /sdk/v1/vendors/:id
//
// Responds 404 when the vendor does not exist within the caller's tenant.
func (h *VendorHandlers) GetVendor(c *gin.Context) {
	tenantID := rbac.GetTenantID(c)
	id := c.Param("id")

	v, err := h.store.GetVendor(c.Request.Context(), tenantID.String(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if v == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "vendor not found"})
		return
	}

	// Best-effort enrichment: errors from the contract/finding lookups are
	// deliberately ignored so a storage hiccup here does not hide the vendor
	// itself. NOTE(review): consider at least logging these errors.
	contracts, _ := h.store.ListContracts(c.Request.Context(), tenantID.String(), &id)
	findings, _ := h.store.ListFindings(c.Request.Context(), tenantID.String(), &id, nil)

	c.JSON(http.StatusOK, gin.H{
		"vendor":    v,
		"contracts": contracts,
		"findings":  findings,
	})
}
|
||||||
|
|
||||||
|
// UpdateVendor updates a vendor
// PUT /sdk/v1/vendors/:id
//
// Partial-update semantics: the request uses pointer fields, and only
// non-nil fields overwrite the stored record. The vendor is loaded first
// so untouched fields survive the save.
func (h *VendorHandlers) UpdateVendor(c *gin.Context) {
	tenantID := rbac.GetTenantID(c)
	id := c.Param("id")

	// Load the current record; this also enforces tenant scoping.
	v, err := h.store.GetVendor(c.Request.Context(), tenantID.String(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if v == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "vendor not found"})
		return
	}

	var req vendor.UpdateVendorRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Apply non-nil fields
	// (Note: some targets are value fields dereferenced with *, others are
	// pointer/raw fields assigned directly — the mix below is intentional.)
	if req.Name != nil {
		v.Name = *req.Name
	}
	if req.LegalForm != nil {
		v.LegalForm = *req.LegalForm
	}
	if req.Country != nil {
		v.Country = *req.Country
	}
	if req.Address != nil {
		v.Address = req.Address
	}
	if req.Website != nil {
		v.Website = *req.Website
	}
	if req.ContactName != nil {
		v.ContactName = *req.ContactName
	}
	if req.ContactEmail != nil {
		v.ContactEmail = *req.ContactEmail
	}
	if req.ContactPhone != nil {
		v.ContactPhone = *req.ContactPhone
	}
	if req.ContactDepartment != nil {
		v.ContactDepartment = *req.ContactDepartment
	}
	if req.Role != nil {
		v.Role = *req.Role
	}
	if req.ServiceCategory != nil {
		v.ServiceCategory = *req.ServiceCategory
	}
	if req.ServiceDescription != nil {
		v.ServiceDescription = *req.ServiceDescription
	}
	if req.DataAccessLevel != nil {
		v.DataAccessLevel = *req.DataAccessLevel
	}
	if req.ProcessingLocations != nil {
		v.ProcessingLocations = req.ProcessingLocations
	}
	if req.Certifications != nil {
		v.Certifications = req.Certifications
	}
	if req.InherentRiskScore != nil {
		v.InherentRiskScore = req.InherentRiskScore
	}
	if req.ResidualRiskScore != nil {
		v.ResidualRiskScore = req.ResidualRiskScore
	}
	if req.ManualRiskAdjustment != nil {
		v.ManualRiskAdjustment = req.ManualRiskAdjustment
	}
	if req.ReviewFrequency != nil {
		v.ReviewFrequency = *req.ReviewFrequency
	}
	if req.LastReviewDate != nil {
		v.LastReviewDate = req.LastReviewDate
	}
	if req.NextReviewDate != nil {
		v.NextReviewDate = req.NextReviewDate
	}
	if req.ProcessingActivityIDs != nil {
		v.ProcessingActivityIDs = req.ProcessingActivityIDs
	}
	if req.Status != nil {
		v.Status = *req.Status
	}
	if req.TemplateID != nil {
		v.TemplateID = req.TemplateID
	}

	if err := h.store.UpdateVendor(c.Request.Context(), v); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, gin.H{"vendor": v})
}
|
||||||
|
|
||||||
|
// DeleteVendor deletes a vendor
|
||||||
|
// DELETE /sdk/v1/vendors/:id
|
||||||
|
func (h *VendorHandlers) DeleteVendor(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
if err := h.store.DeleteVendor(c.Request.Context(), tenantID.String(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "vendor deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Contract CRUD
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateContract creates a new contract for a vendor
// POST /sdk/v1/vendors/contracts
//
// The request carries file metadata (name, size, MIME type, storage path)
// rather than the file content itself. New contracts always start with
// review status PENDING.
func (h *VendorHandlers) CreateContract(c *gin.Context) {
	var req vendor.CreateContractRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	tenantID := rbac.GetTenantID(c)
	userID := rbac.GetUserID(c)

	contract := &vendor.Contract{
		TenantID:            tenantID,
		VendorID:            req.VendorID,
		FileName:            req.FileName,
		OriginalName:        req.OriginalName,
		MimeType:            req.MimeType,
		FileSize:            req.FileSize,
		StoragePath:         req.StoragePath,
		DocumentType:        req.DocumentType,
		Parties:             req.Parties,
		EffectiveDate:       req.EffectiveDate,
		ExpirationDate:      req.ExpirationDate,
		AutoRenewal:         req.AutoRenewal,
		RenewalNoticePeriod: req.RenewalNoticePeriod,
		Version:             req.Version,
		PreviousVersionID:   req.PreviousVersionID,
		ReviewStatus:        "PENDING",
		CreatedBy:           userID.String(),
	}

	// Default the document version when the client did not supply one.
	if contract.Version == "" {
		contract.Version = "1.0"
	}

	if err := h.store.CreateContract(c.Request.Context(), contract); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusCreated, gin.H{"contract": contract})
}
|
||||||
|
|
||||||
|
// ListContracts lists contracts for a tenant
|
||||||
|
// GET /sdk/v1/vendors/contracts
|
||||||
|
func (h *VendorHandlers) ListContracts(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
var vendorID *string
|
||||||
|
if vid := c.Query("vendor_id"); vid != "" {
|
||||||
|
vendorID = &vid
|
||||||
|
}
|
||||||
|
|
||||||
|
contracts, err := h.store.ListContracts(c.Request.Context(), tenantID.String(), vendorID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"contracts": contracts,
|
||||||
|
"total": len(contracts),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetContract retrieves a contract by ID
|
||||||
|
// GET /sdk/v1/vendors/contracts/:id
|
||||||
|
func (h *VendorHandlers) GetContract(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
contract, err := h.store.GetContract(c.Request.Context(), tenantID.String(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if contract == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "contract not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"contract": contract})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateContract updates a contract
// PUT /sdk/v1/vendors/contracts/:id
//
// Partial-update semantics: only non-nil request fields overwrite the
// stored contract; the record is loaded first so other fields survive.
func (h *VendorHandlers) UpdateContract(c *gin.Context) {
	tenantID := rbac.GetTenantID(c)
	id := c.Param("id")

	// Load the current record; this also enforces tenant scoping.
	contract, err := h.store.GetContract(c.Request.Context(), tenantID.String(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if contract == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "contract not found"})
		return
	}

	var req vendor.UpdateContractRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Apply non-nil fields only.
	if req.DocumentType != nil {
		contract.DocumentType = *req.DocumentType
	}
	if req.Parties != nil {
		contract.Parties = req.Parties
	}
	if req.EffectiveDate != nil {
		contract.EffectiveDate = req.EffectiveDate
	}
	if req.ExpirationDate != nil {
		contract.ExpirationDate = req.ExpirationDate
	}
	if req.AutoRenewal != nil {
		contract.AutoRenewal = *req.AutoRenewal
	}
	if req.RenewalNoticePeriod != nil {
		contract.RenewalNoticePeriod = *req.RenewalNoticePeriod
	}
	if req.ReviewStatus != nil {
		contract.ReviewStatus = *req.ReviewStatus
	}
	if req.ReviewCompletedAt != nil {
		contract.ReviewCompletedAt = req.ReviewCompletedAt
	}
	if req.ComplianceScore != nil {
		contract.ComplianceScore = req.ComplianceScore
	}
	if req.Version != nil {
		contract.Version = *req.Version
	}
	if req.ExtractedText != nil {
		contract.ExtractedText = *req.ExtractedText
	}
	if req.PageCount != nil {
		contract.PageCount = req.PageCount
	}

	if err := h.store.UpdateContract(c.Request.Context(), contract); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, gin.H{"contract": contract})
}
|
||||||
|
|
||||||
|
// DeleteContract deletes a contract
|
||||||
|
// DELETE /sdk/v1/vendors/contracts/:id
|
||||||
|
func (h *VendorHandlers) DeleteContract(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
if err := h.store.DeleteContract(c.Request.Context(), tenantID.String(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "contract deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Finding CRUD
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateFinding creates a new compliance finding
// POST /sdk/v1/vendors/findings
//
// Findings always start in OPEN status; they are closed later via
// UpdateFinding or ResolveFinding.
func (h *VendorHandlers) CreateFinding(c *gin.Context) {
	var req vendor.CreateFindingRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	tenantID := rbac.GetTenantID(c)

	// Copy the request fields into a new Finding record. Unlike vendors and
	// contracts, findings carry no CreatedBy field here.
	finding := &vendor.Finding{
		TenantID:       tenantID,
		VendorID:       req.VendorID,
		ContractID:     req.ContractID,
		FindingType:    req.FindingType,
		Category:       req.Category,
		Severity:       req.Severity,
		Title:          req.Title,
		Description:    req.Description,
		Recommendation: req.Recommendation,
		Citations:      req.Citations,
		Status:         vendor.FindingStatusOpen,
		Assignee:       req.Assignee,
		DueDate:        req.DueDate,
	}

	if err := h.store.CreateFinding(c.Request.Context(), finding); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusCreated, gin.H{"finding": finding})
}
|
||||||
|
|
||||||
|
// ListFindings lists findings for a tenant
|
||||||
|
// GET /sdk/v1/vendors/findings
|
||||||
|
func (h *VendorHandlers) ListFindings(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
var vendorID, contractID *string
|
||||||
|
if vid := c.Query("vendor_id"); vid != "" {
|
||||||
|
vendorID = &vid
|
||||||
|
}
|
||||||
|
if cid := c.Query("contract_id"); cid != "" {
|
||||||
|
contractID = &cid
|
||||||
|
}
|
||||||
|
|
||||||
|
findings, err := h.store.ListFindings(c.Request.Context(), tenantID.String(), vendorID, contractID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"findings": findings,
|
||||||
|
"total": len(findings),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetFinding retrieves a finding by ID
|
||||||
|
// GET /sdk/v1/vendors/findings/:id
|
||||||
|
func (h *VendorHandlers) GetFinding(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
finding, err := h.store.GetFinding(c.Request.Context(), tenantID.String(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if finding == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "finding not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"finding": finding})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateFinding updates a finding
// PUT /sdk/v1/vendors/findings/:id
//
// Partial-update semantics: only non-nil request fields overwrite the
// stored finding; the record is loaded first so other fields survive.
func (h *VendorHandlers) UpdateFinding(c *gin.Context) {
	tenantID := rbac.GetTenantID(c)
	id := c.Param("id")

	// Load the current record; this also enforces tenant scoping.
	finding, err := h.store.GetFinding(c.Request.Context(), tenantID.String(), id)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if finding == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "finding not found"})
		return
	}

	var req vendor.UpdateFindingRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Apply non-nil fields only.
	if req.FindingType != nil {
		finding.FindingType = *req.FindingType
	}
	if req.Category != nil {
		finding.Category = *req.Category
	}
	if req.Severity != nil {
		finding.Severity = *req.Severity
	}
	if req.Title != nil {
		finding.Title = *req.Title
	}
	if req.Description != nil {
		finding.Description = *req.Description
	}
	if req.Recommendation != nil {
		finding.Recommendation = *req.Recommendation
	}
	if req.Citations != nil {
		finding.Citations = req.Citations
	}
	if req.Status != nil {
		finding.Status = *req.Status
	}
	if req.Assignee != nil {
		finding.Assignee = *req.Assignee
	}
	if req.DueDate != nil {
		finding.DueDate = req.DueDate
	}
	if req.Resolution != nil {
		finding.Resolution = *req.Resolution
	}

	if err := h.store.UpdateFinding(c.Request.Context(), finding); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, gin.H{"finding": finding})
}
|
||||||
|
|
||||||
|
// ResolveFinding resolves a finding with a resolution description
|
||||||
|
// POST /sdk/v1/vendors/findings/:id/resolve
|
||||||
|
func (h *VendorHandlers) ResolveFinding(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
id := c.Param("id")
|
||||||
|
|
||||||
|
var req vendor.ResolveFindingRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.ResolveFinding(c.Request.Context(), tenantID.String(), id, req.Resolution, userID.String()); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "finding resolved"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Control Instance Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// UpsertControlInstance creates or updates a control instance
|
||||||
|
// POST /sdk/v1/vendors/controls
|
||||||
|
func (h *VendorHandlers) UpsertControlInstance(c *gin.Context) {
|
||||||
|
var req struct {
|
||||||
|
VendorID string `json:"vendor_id" binding:"required"`
|
||||||
|
ControlID string `json:"control_id" binding:"required"`
|
||||||
|
ControlDomain string `json:"control_domain"`
|
||||||
|
Status vendor.ControlStatus `json:"status" binding:"required"`
|
||||||
|
EvidenceIDs json.RawMessage `json:"evidence_ids,omitempty"`
|
||||||
|
Notes string `json:"notes,omitempty"`
|
||||||
|
NextAssessmentDate *time.Time `json:"next_assessment_date,omitempty"`
|
||||||
|
}
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
now := time.Now().UTC()
|
||||||
|
userIDStr := userID.String()
|
||||||
|
|
||||||
|
ci := &vendor.ControlInstance{
|
||||||
|
TenantID: tenantID,
|
||||||
|
ControlID: req.ControlID,
|
||||||
|
ControlDomain: req.ControlDomain,
|
||||||
|
Status: req.Status,
|
||||||
|
EvidenceIDs: req.EvidenceIDs,
|
||||||
|
Notes: req.Notes,
|
||||||
|
LastAssessedAt: &now,
|
||||||
|
LastAssessedBy: &userIDStr,
|
||||||
|
NextAssessmentDate: req.NextAssessmentDate,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse VendorID
|
||||||
|
vendorUUID, err := parseUUID(req.VendorID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid vendor_id"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
ci.VendorID = vendorUUID
|
||||||
|
|
||||||
|
if err := h.store.UpsertControlInstance(c.Request.Context(), ci); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"control_instance": ci})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListControlInstances lists control instances for a vendor
|
||||||
|
// GET /sdk/v1/vendors/controls
|
||||||
|
func (h *VendorHandlers) ListControlInstances(c *gin.Context) {
|
||||||
|
vendorID := c.Query("vendor_id")
|
||||||
|
if vendorID == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "vendor_id query parameter is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
instances, err := h.store.ListControlInstances(c.Request.Context(), tenantID.String(), vendorID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"control_instances": instances,
|
||||||
|
"total": len(instances),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Template Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ListTemplates lists available templates
|
||||||
|
// GET /sdk/v1/vendors/templates
|
||||||
|
func (h *VendorHandlers) ListTemplates(c *gin.Context) {
|
||||||
|
templateType := c.DefaultQuery("type", "VENDOR")
|
||||||
|
|
||||||
|
var category, industry *string
|
||||||
|
if cat := c.Query("category"); cat != "" {
|
||||||
|
category = &cat
|
||||||
|
}
|
||||||
|
if ind := c.Query("industry"); ind != "" {
|
||||||
|
industry = &ind
|
||||||
|
}
|
||||||
|
|
||||||
|
templates, err := h.store.ListTemplates(c.Request.Context(), templateType, category, industry)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"templates": templates,
|
||||||
|
"total": len(templates),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTemplate retrieves a template by its template_id string
|
||||||
|
// GET /sdk/v1/vendors/templates/:templateId
|
||||||
|
func (h *VendorHandlers) GetTemplate(c *gin.Context) {
|
||||||
|
templateID := c.Param("templateId")
|
||||||
|
if templateID == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "template ID is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpl, err := h.store.GetTemplate(c.Request.Context(), templateID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if tmpl == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "template not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"template": tmpl})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateTemplate creates a custom template
// POST /sdk/v1/vendors/templates
//
// Templates are created active. System templates (IsSystem) carry no
// tenant ID; custom templates are scoped to the caller's tenant.
func (h *VendorHandlers) CreateTemplate(c *gin.Context) {
	var req vendor.CreateTemplateRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	tmpl := &vendor.Template{
		TemplateType:  req.TemplateType,
		TemplateID:    req.TemplateID,
		Category:      req.Category,
		NameDE:        req.NameDE,
		NameEN:        req.NameEN,
		DescriptionDE: req.DescriptionDE,
		DescriptionEN: req.DescriptionEN,
		TemplateData:  req.TemplateData,
		Industry:      req.Industry,
		Tags:          req.Tags,
		IsSystem:      req.IsSystem,
		IsActive:      true,
	}

	// Set tenant for custom (non-system) templates
	// NOTE(review): IsSystem comes from the request body, so any caller can
	// create a tenant-less "system" template — confirm this route is
	// restricted to privileged users.
	if !req.IsSystem {
		tid := rbac.GetTenantID(c).String()
		tmpl.TenantID = &tid
	}

	if err := h.store.CreateTemplate(c.Request.Context(), tmpl); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusCreated, gin.H{"template": tmpl})
}
|
||||||
|
|
||||||
|
// ApplyTemplate creates a vendor from a template
// POST /sdk/v1/vendors/templates/:templateId/apply
//
// Flow: load the template, decode its suggested vendor fields from
// template_data, merge optional overrides from the request body, fill
// remaining gaps with defaults, create the vendor, and bump the template's
// usage counter.
func (h *VendorHandlers) ApplyTemplate(c *gin.Context) {
	templateID := c.Param("templateId")
	if templateID == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "template ID is required"})
		return
	}

	tmpl, err := h.store.GetTemplate(c.Request.Context(), templateID)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	if tmpl == nil {
		c.JSON(http.StatusNotFound, gin.H{"error": "template not found"})
		return
	}

	// Parse template_data to extract suggested vendor fields
	var templateData struct {
		ServiceCategory     string          `json:"service_category"`
		SuggestedRole       string          `json:"suggested_role"`
		DataAccessLevel     string          `json:"data_access_level"`
		ReviewFrequency     string          `json:"review_frequency"`
		Certifications      json.RawMessage `json:"certifications"`
		ProcessingLocations json.RawMessage `json:"processing_locations"`
	}
	if err := json.Unmarshal(tmpl.TemplateData, &templateData); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to parse template data"})
		return
	}

	// Optional overrides from request body
	var overrides struct {
		Name         string `json:"name"`
		Country      string `json:"country"`
		Website      string `json:"website"`
		ContactName  string `json:"contact_name"`
		ContactEmail string `json:"contact_email"`
	}
	// Bind error deliberately ignored: an empty or absent body simply means
	// no overrides, and every override has a fallback below.
	c.ShouldBindJSON(&overrides)

	tenantID := rbac.GetTenantID(c)
	userID := rbac.GetUserID(c)

	v := &vendor.Vendor{
		TenantID:            tenantID,
		Name:                overrides.Name,
		Country:             overrides.Country,
		Website:             overrides.Website,
		ContactName:         overrides.ContactName,
		ContactEmail:        overrides.ContactEmail,
		Role:                vendor.VendorRole(templateData.SuggestedRole),
		ServiceCategory:     templateData.ServiceCategory,
		DataAccessLevel:     templateData.DataAccessLevel,
		ReviewFrequency:     templateData.ReviewFrequency,
		Certifications:      templateData.Certifications,
		ProcessingLocations: templateData.ProcessingLocations,
		Status:              vendor.VendorStatusActive,
		TemplateID:          &templateID,
		CreatedBy:           userID.String(),
	}

	// Defaults for anything neither the overrides nor the template supplied:
	// German template name, country DE, role PROCESSOR.
	if v.Name == "" {
		v.Name = tmpl.NameDE
	}
	if v.Country == "" {
		v.Country = "DE"
	}
	if v.Role == "" {
		v.Role = vendor.VendorRoleProcessor
	}

	if err := h.store.CreateVendor(c.Request.Context(), v); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Increment template usage
	// (best effort — a failed counter bump must not fail vendor creation).
	_ = h.store.IncrementTemplateUsage(c.Request.Context(), templateID)

	c.JSON(http.StatusCreated, gin.H{
		"vendor":      v,
		"template_id": templateID,
		"message":     "vendor created from template",
	})
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns aggregated vendor statistics
|
||||||
|
// GET /sdk/v1/vendors/stats
|
||||||
|
func (h *VendorHandlers) GetStatistics(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
stats, err := h.store.GetVendorStats(c.Request.Context(), tenantID.String())
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, stats)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
func parseUUID(s string) (uuid.UUID, error) {
|
||||||
|
return uuid.Parse(s)
|
||||||
|
}
|
||||||
@@ -0,0 +1,538 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
	"net/http"
	"strconv"
	"time"

	"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
	"github.com/breakpilot/ai-compliance-sdk/internal/whistleblower"
	"github.com/gin-gonic/gin"
	"github.com/google/uuid"
)
|
||||||
|
|
||||||
|
// WhistleblowerHandlers handles whistleblower HTTP requests.
// It exposes both unauthenticated public endpoints for anonymous
// reporters and authenticated admin endpoints; all persistence is
// delegated to the underlying store.
type WhistleblowerHandlers struct {
	store *whistleblower.Store // backing store for reports, messages and measures
}
|
||||||
|
|
||||||
|
// NewWhistleblowerHandlers creates new whistleblower handlers
|
||||||
|
func NewWhistleblowerHandlers(store *whistleblower.Store) *WhistleblowerHandlers {
|
||||||
|
return &WhistleblowerHandlers{store: store}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Public Handlers (NO auth required — for anonymous reporters)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SubmitReport handles public report submission (no auth required)
// POST /sdk/v1/whistleblower/public/submit
//
// The tenant is selected via the X-Tenant-ID header (or ?tenant_id=)
// because the endpoint is unauthenticated.
// NOTE(review): the tenant ID is fully client-controlled here — confirm
// that submitting into an arbitrary tenant is acceptable for this flow.
//
// On success the response contains the reference number and the access
// key; the access key is returned exactly once and never again.
func (h *WhistleblowerHandlers) SubmitReport(c *gin.Context) {
	var req whistleblower.PublicReportSubmission
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Get tenant ID from header or query param (public endpoint still needs tenant context)
	tenantIDStr := c.GetHeader("X-Tenant-ID")
	if tenantIDStr == "" {
		tenantIDStr = c.Query("tenant_id")
	}
	if tenantIDStr == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "tenant_id is required"})
		return
	}

	tenantID, err := uuid.Parse(tenantIDStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid tenant_id"})
		return
	}

	report := &whistleblower.Report{
		TenantID:    tenantID,
		Category:    req.Category,
		Title:       req.Title,
		Description: req.Description,
		IsAnonymous: req.IsAnonymous,
	}

	// Only set reporter info if not anonymous — anonymous submissions must
	// never persist identifying data, even if the client sent it.
	if !req.IsAnonymous {
		report.ReporterName = req.ReporterName
		report.ReporterEmail = req.ReporterEmail
		report.ReporterPhone = req.ReporterPhone
	}

	if err := h.store.CreateReport(c.Request.Context(), report); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Return reference number and access key (access key only shown ONCE!)
	c.JSON(http.StatusCreated, whistleblower.PublicReportResponse{
		ReferenceNumber: report.ReferenceNumber,
		AccessKey:       report.AccessKey,
	})
}
|
||||||
|
|
||||||
|
// GetReportByAccessKey retrieves a report by access key (for anonymous reporters)
|
||||||
|
// GET /sdk/v1/whistleblower/public/report?access_key=xxx
|
||||||
|
func (h *WhistleblowerHandlers) GetReportByAccessKey(c *gin.Context) {
|
||||||
|
accessKey := c.Query("access_key")
|
||||||
|
if accessKey == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "access_key is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReportByAccessKey(c.Request.Context(), accessKey)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return limited fields for public access (no access_key, no internal details)
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"reference_number": report.ReferenceNumber,
|
||||||
|
"category": report.Category,
|
||||||
|
"status": report.Status,
|
||||||
|
"title": report.Title,
|
||||||
|
"received_at": report.ReceivedAt,
|
||||||
|
"deadline_acknowledgment": report.DeadlineAcknowledgment,
|
||||||
|
"deadline_feedback": report.DeadlineFeedback,
|
||||||
|
"acknowledged_at": report.AcknowledgedAt,
|
||||||
|
"closed_at": report.ClosedAt,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// SendPublicMessage allows a reporter to send a message via access key
|
||||||
|
// POST /sdk/v1/whistleblower/public/message?access_key=xxx
|
||||||
|
func (h *WhistleblowerHandlers) SendPublicMessage(c *gin.Context) {
|
||||||
|
accessKey := c.Query("access_key")
|
||||||
|
if accessKey == "" {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "access_key is required"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReportByAccessKey(c.Request.Context(), accessKey)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req whistleblower.SendMessageRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
msg := &whistleblower.AnonymousMessage{
|
||||||
|
ReportID: report.ID,
|
||||||
|
Direction: whistleblower.MessageDirectionReporterToAdmin,
|
||||||
|
Content: req.Content,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.AddMessage(c.Request.Context(), msg); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"message": msg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Admin Handlers (auth required)
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ListReports lists all reports for the tenant
|
||||||
|
// GET /sdk/v1/whistleblower/reports
|
||||||
|
func (h *WhistleblowerHandlers) ListReports(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
filters := &whistleblower.ReportFilters{
|
||||||
|
Limit: 50,
|
||||||
|
}
|
||||||
|
|
||||||
|
if status := c.Query("status"); status != "" {
|
||||||
|
filters.Status = whistleblower.ReportStatus(status)
|
||||||
|
}
|
||||||
|
if category := c.Query("category"); category != "" {
|
||||||
|
filters.Category = whistleblower.ReportCategory(category)
|
||||||
|
}
|
||||||
|
|
||||||
|
reports, total, err := h.store.ListReports(c.Request.Context(), tenantID, filters)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, whistleblower.ReportListResponse{
|
||||||
|
Reports: reports,
|
||||||
|
Total: total,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetReport retrieves a report by ID (admin)
|
||||||
|
// GET /sdk/v1/whistleblower/reports/:id
|
||||||
|
func (h *WhistleblowerHandlers) GetReport(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get messages and measures for full view
|
||||||
|
messages, _ := h.store.ListMessages(c.Request.Context(), id)
|
||||||
|
measures, _ := h.store.ListMeasures(c.Request.Context(), id)
|
||||||
|
|
||||||
|
// Do not expose access key to admin either
|
||||||
|
report.AccessKey = ""
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"report": report,
|
||||||
|
"messages": messages,
|
||||||
|
"measures": measures,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateReport updates a report
|
||||||
|
// PUT /sdk/v1/whistleblower/reports/:id
|
||||||
|
func (h *WhistleblowerHandlers) UpdateReport(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req whistleblower.ReportUpdateRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
if req.Category != "" {
|
||||||
|
report.Category = req.Category
|
||||||
|
}
|
||||||
|
if req.Status != "" {
|
||||||
|
report.Status = req.Status
|
||||||
|
}
|
||||||
|
if req.Title != "" {
|
||||||
|
report.Title = req.Title
|
||||||
|
}
|
||||||
|
if req.Description != "" {
|
||||||
|
report.Description = req.Description
|
||||||
|
}
|
||||||
|
if req.AssignedTo != nil {
|
||||||
|
report.AssignedTo = req.AssignedTo
|
||||||
|
}
|
||||||
|
|
||||||
|
report.AuditTrail = append(report.AuditTrail, whistleblower.AuditEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "report_updated",
|
||||||
|
UserID: userID.String(),
|
||||||
|
Details: "Report updated by admin",
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := h.store.UpdateReport(c.Request.Context(), report); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report.AccessKey = ""
|
||||||
|
c.JSON(http.StatusOK, gin.H{"report": report})
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteReport deletes a report
|
||||||
|
// DELETE /sdk/v1/whistleblower/reports/:id
|
||||||
|
func (h *WhistleblowerHandlers) DeleteReport(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.DeleteReport(c.Request.Context(), id); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "report deleted"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// AcknowledgeReport acknowledges a report (within 7-day HinSchG deadline)
|
||||||
|
// POST /sdk/v1/whistleblower/reports/:id/acknowledge
|
||||||
|
func (h *WhistleblowerHandlers) AcknowledgeReport(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if report.AcknowledgedAt != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "report already acknowledged"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
if err := h.store.AcknowledgeReport(c.Request.Context(), id, userID); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optionally send acknowledgment message to reporter
|
||||||
|
var req whistleblower.AcknowledgeRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err == nil && req.Message != "" {
|
||||||
|
msg := &whistleblower.AnonymousMessage{
|
||||||
|
ReportID: id,
|
||||||
|
Direction: whistleblower.MessageDirectionAdminToReporter,
|
||||||
|
Content: req.Message,
|
||||||
|
}
|
||||||
|
h.store.AddMessage(c.Request.Context(), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if deadline was met
|
||||||
|
isOverdue := time.Now().UTC().After(report.DeadlineAcknowledgment)
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"message": "report acknowledged",
|
||||||
|
"is_overdue": isOverdue,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// StartInvestigation changes the report status to investigation
|
||||||
|
// POST /sdk/v1/whistleblower/reports/:id/investigate
|
||||||
|
func (h *WhistleblowerHandlers) StartInvestigation(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
report.Status = whistleblower.ReportStatusInvestigation
|
||||||
|
report.AuditTrail = append(report.AuditTrail, whistleblower.AuditEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "investigation_started",
|
||||||
|
UserID: userID.String(),
|
||||||
|
Details: "Investigation started",
|
||||||
|
})
|
||||||
|
|
||||||
|
if err := h.store.UpdateReport(c.Request.Context(), report); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"message": "investigation started",
|
||||||
|
"report": report,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddMeasure adds a corrective measure to a report
|
||||||
|
// POST /sdk/v1/whistleblower/reports/:id/measures
|
||||||
|
func (h *WhistleblowerHandlers) AddMeasure(c *gin.Context) {
|
||||||
|
reportID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify report exists
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), reportID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req whistleblower.AddMeasureRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
measure := &whistleblower.Measure{
|
||||||
|
ReportID: reportID,
|
||||||
|
Title: req.Title,
|
||||||
|
Description: req.Description,
|
||||||
|
Responsible: req.Responsible,
|
||||||
|
DueDate: req.DueDate,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.AddMeasure(c.Request.Context(), measure); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update report status to measures_taken if not already
|
||||||
|
if report.Status != whistleblower.ReportStatusMeasuresTaken &&
|
||||||
|
report.Status != whistleblower.ReportStatusClosed {
|
||||||
|
report.Status = whistleblower.ReportStatusMeasuresTaken
|
||||||
|
report.AuditTrail = append(report.AuditTrail, whistleblower.AuditEntry{
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
Action: "measure_added",
|
||||||
|
UserID: userID.String(),
|
||||||
|
Details: "Corrective measure added: " + req.Title,
|
||||||
|
})
|
||||||
|
h.store.UpdateReport(c.Request.Context(), report)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"measure": measure})
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloseReport closes a report with a resolution
|
||||||
|
// POST /sdk/v1/whistleblower/reports/:id/close
|
||||||
|
func (h *WhistleblowerHandlers) CloseReport(c *gin.Context) {
|
||||||
|
id, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req whistleblower.CloseReportRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
userID := rbac.GetUserID(c)
|
||||||
|
|
||||||
|
if err := h.store.CloseReport(c.Request.Context(), id, userID, req.Resolution); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{"message": "report closed"})
|
||||||
|
}
|
||||||
|
|
||||||
|
// SendAdminMessage sends a message from admin to reporter
|
||||||
|
// POST /sdk/v1/whistleblower/reports/:id/messages
|
||||||
|
func (h *WhistleblowerHandlers) SendAdminMessage(c *gin.Context) {
|
||||||
|
reportID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify report exists
|
||||||
|
report, err := h.store.GetReport(c.Request.Context(), reportID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if report == nil {
|
||||||
|
c.JSON(http.StatusNotFound, gin.H{"error": "report not found"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req whistleblower.SendMessageRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
msg := &whistleblower.AnonymousMessage{
|
||||||
|
ReportID: reportID,
|
||||||
|
Direction: whistleblower.MessageDirectionAdminToReporter,
|
||||||
|
Content: req.Content,
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := h.store.AddMessage(c.Request.Context(), msg); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{"message": msg})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListMessages lists messages for a report
|
||||||
|
// GET /sdk/v1/whistleblower/reports/:id/messages
|
||||||
|
func (h *WhistleblowerHandlers) ListMessages(c *gin.Context) {
|
||||||
|
reportID, err := uuid.Parse(c.Param("id"))
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid report ID"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
messages, err := h.store.ListMessages(c.Request.Context(), reportID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"messages": messages,
|
||||||
|
"total": len(messages),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStatistics returns whistleblower statistics for the tenant
|
||||||
|
// GET /sdk/v1/whistleblower/statistics
|
||||||
|
func (h *WhistleblowerHandlers) GetStatistics(c *gin.Context) {
|
||||||
|
tenantID := rbac.GetTenantID(c)
|
||||||
|
|
||||||
|
stats, err := h.store.GetStatistics(c.Request.Context(), tenantID)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, stats)
|
||||||
|
}
|
||||||
164
ai-compliance-sdk/internal/dsb/models.go
Normal file
164
ai-compliance-sdk/internal/dsb/models.go
Normal file
@@ -0,0 +1,164 @@
|
|||||||
|
package dsb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Core Models
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Assignment represents a DSB-to-tenant assignment: the contract under
// which a data protection officer (DSB) serves a tenant.
type Assignment struct {
	ID                 uuid.UUID  `json:"id"`
	DSBUserID          uuid.UUID  `json:"dsb_user_id"` // the DSB user holding this assignment
	TenantID           uuid.UUID  `json:"tenant_id"`
	TenantName         string     `json:"tenant_name"` // populated via JOIN
	TenantSlug         string     `json:"tenant_slug"` // populated via JOIN
	Status             string     `json:"status"` // active, paused, terminated
	ContractStart      time.Time  `json:"contract_start"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"` // nil for open-ended contracts
	MonthlyHoursBudget float64    `json:"monthly_hours_budget"`   // budgeted DSB hours per month
	Notes              string     `json:"notes"`
	CreatedAt          time.Time  `json:"created_at"`
	UpdatedAt          time.Time  `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// HourEntry represents a DSB time tracking entry: hours worked on a
// given assignment on a given date.
type HourEntry struct {
	ID           uuid.UUID `json:"id"`
	AssignmentID uuid.UUID `json:"assignment_id"`
	Date         time.Time `json:"date"`
	Hours        float64   `json:"hours"`
	Category     string    `json:"category"` // dsfa_review, consultation, audit, training, incident_response, documentation, meeting, other
	Description  string    `json:"description"`
	Billable     bool      `json:"billable"` // whether the entry counts toward billable hours
	CreatedAt    time.Time `json:"created_at"`
}
|
||||||
|
|
||||||
|
// Task represents a DSB task/work item tracked against an assignment.
type Task struct {
	ID           uuid.UUID  `json:"id"`
	AssignmentID uuid.UUID  `json:"assignment_id"`
	Title        string     `json:"title"`
	Description  string     `json:"description"`
	Category     string     `json:"category"` // dsfa_review, dsr_response, incident_review, audit_preparation, policy_review, training, consultation, other
	Priority     string     `json:"priority"` // low, medium, high, urgent
	Status       string     `json:"status"` // open, in_progress, waiting, completed, cancelled
	DueDate      *time.Time `json:"due_date,omitempty"`      // nil when no deadline is set
	CompletedAt  *time.Time `json:"completed_at,omitempty"`  // nil until the task is completed
	CreatedAt    time.Time  `json:"created_at"`
	UpdatedAt    time.Time  `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// Communication represents a DSB communication log entry — a record of
// correspondence exchanged in the context of an assignment.
type Communication struct {
	ID           uuid.UUID `json:"id"`
	AssignmentID uuid.UUID `json:"assignment_id"`
	Direction    string    `json:"direction"` // inbound, outbound
	Channel      string    `json:"channel"` // email, phone, meeting, portal, letter
	Subject      string    `json:"subject"`
	Content      string    `json:"content"`
	Participants string    `json:"participants"` // free-text list of participants
	CreatedAt    time.Time `json:"created_at"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Dashboard Models
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// DSBDashboard provides the aggregated overview for a DSB user across
// all of their assignments.
type DSBDashboard struct {
	Assignments         []AssignmentOverview `json:"assignments"`
	TotalAssignments    int                  `json:"total_assignments"`
	ActiveAssignments   int                  `json:"active_assignments"`
	TotalHoursThisMonth float64              `json:"total_hours_this_month"` // summed over all assignments
	OpenTasks           int                  `json:"open_tasks"`
	UrgentTasks         int                  `json:"urgent_tasks"`
	GeneratedAt         time.Time            `json:"generated_at"` // UTC timestamp of dashboard generation
}
|
||||||
|
|
||||||
|
// AssignmentOverview enriches an Assignment with aggregated metrics for
// dashboard display. The Assignment fields are embedded (flattened in JSON).
type AssignmentOverview struct {
	Assignment
	ComplianceScore int        `json:"compliance_score"`
	HoursThisMonth  float64    `json:"hours_this_month"`
	HoursBudget     float64    `json:"hours_budget"` // mirrors MonthlyHoursBudget for the UI
	OpenTaskCount   int        `json:"open_task_count"`
	UrgentTaskCount int        `json:"urgent_task_count"`
	NextDeadline    *time.Time `json:"next_deadline,omitempty"` // nil when no task has a due date
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Request Models
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateAssignmentRequest is the request body for creating an assignment.
// DSBUserID, TenantID and ContractStart are mandatory; Status defaults
// are applied server-side when omitted.
type CreateAssignmentRequest struct {
	DSBUserID          uuid.UUID  `json:"dsb_user_id" binding:"required"`
	TenantID           uuid.UUID  `json:"tenant_id" binding:"required"`
	Status             string     `json:"status"`
	ContractStart      time.Time  `json:"contract_start" binding:"required"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"`
	MonthlyHoursBudget float64    `json:"monthly_hours_budget"`
	Notes              string     `json:"notes"`
}
|
||||||
|
|
||||||
|
// UpdateAssignmentRequest is the request body for updating an assignment.
// All fields are pointers so that omitted fields leave the existing
// value unchanged (partial update semantics).
type UpdateAssignmentRequest struct {
	Status             *string    `json:"status,omitempty"`
	ContractEnd        *time.Time `json:"contract_end,omitempty"`
	MonthlyHoursBudget *float64   `json:"monthly_hours_budget,omitempty"`
	Notes              *string    `json:"notes,omitempty"`
}
|
||||||
|
|
||||||
|
// CreateHourEntryRequest is the request body for creating a time entry.
// NOTE(review): `binding:"required"` on Hours rejects an explicit 0 —
// confirm zero-hour entries are intentionally disallowed.
type CreateHourEntryRequest struct {
	Date        time.Time `json:"date" binding:"required"`
	Hours       float64   `json:"hours" binding:"required"`
	Category    string    `json:"category" binding:"required"`
	Description string    `json:"description" binding:"required"`
	Billable    *bool     `json:"billable,omitempty"` // nil means "use the server default"
}
|
||||||
|
|
||||||
|
// CreateTaskRequest is the request body for creating a task.
type CreateTaskRequest struct {
	Title       string     `json:"title" binding:"required"`
	Description string     `json:"description"`
	Category    string     `json:"category" binding:"required"`
	Priority    string     `json:"priority"` // defaults applied server-side when empty
	DueDate     *time.Time `json:"due_date,omitempty"`
}
|
||||||
|
|
||||||
|
// UpdateTaskRequest is the request body for updating a task.
// All fields are pointers so that omitted fields leave the existing
// value unchanged (partial update semantics).
type UpdateTaskRequest struct {
	Title       *string    `json:"title,omitempty"`
	Description *string    `json:"description,omitempty"`
	Category    *string    `json:"category,omitempty"`
	Priority    *string    `json:"priority,omitempty"`
	Status      *string    `json:"status,omitempty"`
	DueDate     *time.Time `json:"due_date,omitempty"`
}
|
||||||
|
|
||||||
|
// CreateCommunicationRequest is the request body for creating a communication entry.
type CreateCommunicationRequest struct {
	Direction    string `json:"direction" binding:"required"` // inbound, outbound
	Channel      string `json:"channel" binding:"required"`   // email, phone, meeting, portal, letter
	Subject      string `json:"subject" binding:"required"`
	Content      string `json:"content"`
	Participants string `json:"participants"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Summary Models
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// HoursSummary provides aggregated hour statistics for an assignment,
// optionally restricted to a single month.
type HoursSummary struct {
	TotalHours    float64            `json:"total_hours"`
	BillableHours float64            `json:"billable_hours"` // subset of TotalHours flagged billable
	ByCategory    map[string]float64 `json:"by_category"`    // hours keyed by entry category
	Period        string             `json:"period"` // YYYY-MM or "all"
}
|
||||||
510
ai-compliance-sdk/internal/dsb/store.go
Normal file
510
ai-compliance-sdk/internal/dsb/store.go
Normal file
@@ -0,0 +1,510 @@
|
|||||||
|
package dsb
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store provides database operations for the DSB portal.
type Store struct {
	pool           *pgxpool.Pool    // Postgres connection pool used by all queries
	reportingStore *reporting.Store // used to enrich dashboards with compliance scores; may be nil
}

// NewStore creates a new DSB store.
// reportingStore may be nil; dashboard compliance-score enrichment is then
// skipped (GetDashboard checks for nil before using it).
func NewStore(pool *pgxpool.Pool, reportingStore *reporting.Store) *Store {
	return &Store{
		pool:           pool,
		reportingStore: reportingStore,
	}
}

// Pool returns the underlying connection pool for direct queries when needed.
func (s *Store) Pool() *pgxpool.Pool {
	return s.pool
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Dashboard
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetDashboard generates the aggregated DSB dashboard for a given DSB user.
|
||||||
|
func (s *Store) GetDashboard(ctx context.Context, dsbUserID uuid.UUID) (*DSBDashboard, error) {
|
||||||
|
assignments, err := s.ListAssignments(ctx, dsbUserID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("list assignments: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
now := time.Now().UTC()
|
||||||
|
currentMonth := now.Format("2006-01")
|
||||||
|
|
||||||
|
dashboard := &DSBDashboard{
|
||||||
|
Assignments: make([]AssignmentOverview, 0, len(assignments)),
|
||||||
|
GeneratedAt: now,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, a := range assignments {
|
||||||
|
overview := AssignmentOverview{
|
||||||
|
Assignment: a,
|
||||||
|
HoursBudget: a.MonthlyHoursBudget,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enrich with compliance score (error-tolerant)
|
||||||
|
if s.reportingStore != nil {
|
||||||
|
report, err := s.reportingStore.GenerateReport(ctx, a.TenantID)
|
||||||
|
if err == nil && report != nil {
|
||||||
|
overview.ComplianceScore = report.ComplianceScore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hours this month
|
||||||
|
summary, err := s.GetHoursSummary(ctx, a.ID, currentMonth)
|
||||||
|
if err == nil && summary != nil {
|
||||||
|
overview.HoursThisMonth = summary.TotalHours
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open and urgent tasks
|
||||||
|
openTasks, err := s.ListTasks(ctx, a.ID, "open")
|
||||||
|
if err == nil {
|
||||||
|
overview.OpenTaskCount = len(openTasks)
|
||||||
|
for _, t := range openTasks {
|
||||||
|
if t.Priority == "urgent" {
|
||||||
|
overview.UrgentTaskCount++
|
||||||
|
}
|
||||||
|
if t.DueDate != nil && (overview.NextDeadline == nil || t.DueDate.Before(*overview.NextDeadline)) {
|
||||||
|
overview.NextDeadline = t.DueDate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also count in_progress tasks
|
||||||
|
inProgressTasks, err := s.ListTasks(ctx, a.ID, "in_progress")
|
||||||
|
if err == nil {
|
||||||
|
overview.OpenTaskCount += len(inProgressTasks)
|
||||||
|
for _, t := range inProgressTasks {
|
||||||
|
if t.Priority == "urgent" {
|
||||||
|
overview.UrgentTaskCount++
|
||||||
|
}
|
||||||
|
if t.DueDate != nil && (overview.NextDeadline == nil || t.DueDate.Before(*overview.NextDeadline)) {
|
||||||
|
overview.NextDeadline = t.DueDate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dashboard.Assignments = append(dashboard.Assignments, overview)
|
||||||
|
dashboard.TotalAssignments++
|
||||||
|
if a.Status == "active" {
|
||||||
|
dashboard.ActiveAssignments++
|
||||||
|
}
|
||||||
|
dashboard.TotalHoursThisMonth += overview.HoursThisMonth
|
||||||
|
dashboard.OpenTasks += overview.OpenTaskCount
|
||||||
|
dashboard.UrgentTasks += overview.UrgentTaskCount
|
||||||
|
}
|
||||||
|
|
||||||
|
return dashboard, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Assignments
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateAssignment inserts a new DSB assignment.
|
||||||
|
func (s *Store) CreateAssignment(ctx context.Context, a *Assignment) error {
|
||||||
|
a.ID = uuid.New()
|
||||||
|
now := time.Now().UTC()
|
||||||
|
a.CreatedAt = now
|
||||||
|
a.UpdatedAt = now
|
||||||
|
|
||||||
|
if a.Status == "" {
|
||||||
|
a.Status = "active"
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO dsb_assignments (id, dsb_user_id, tenant_id, status, contract_start, contract_end, monthly_hours_budget, notes, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
|
`, a.ID, a.DSBUserID, a.TenantID, a.Status, a.ContractStart, a.ContractEnd, a.MonthlyHoursBudget, a.Notes, a.CreatedAt, a.UpdatedAt)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("insert assignment: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListAssignments returns all assignments for a given DSB user, joined with tenant info.
|
||||||
|
func (s *Store) ListAssignments(ctx context.Context, dsbUserID uuid.UUID) ([]Assignment, error) {
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT a.id, a.dsb_user_id, a.tenant_id, ct.name, ct.slug,
|
||||||
|
a.status, a.contract_start, a.contract_end,
|
||||||
|
a.monthly_hours_budget, a.notes, a.created_at, a.updated_at
|
||||||
|
FROM dsb_assignments a
|
||||||
|
JOIN compliance_tenants ct ON ct.id = a.tenant_id
|
||||||
|
WHERE a.dsb_user_id = $1
|
||||||
|
ORDER BY a.created_at DESC
|
||||||
|
`, dsbUserID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query assignments: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var assignments []Assignment
|
||||||
|
for rows.Next() {
|
||||||
|
var a Assignment
|
||||||
|
if err := rows.Scan(
|
||||||
|
&a.ID, &a.DSBUserID, &a.TenantID, &a.TenantName, &a.TenantSlug,
|
||||||
|
&a.Status, &a.ContractStart, &a.ContractEnd,
|
||||||
|
&a.MonthlyHoursBudget, &a.Notes, &a.CreatedAt, &a.UpdatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan assignment: %w", err)
|
||||||
|
}
|
||||||
|
assignments = append(assignments, a)
|
||||||
|
}
|
||||||
|
|
||||||
|
if assignments == nil {
|
||||||
|
assignments = []Assignment{}
|
||||||
|
}
|
||||||
|
return assignments, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAssignment retrieves a single assignment by ID.
|
||||||
|
func (s *Store) GetAssignment(ctx context.Context, id uuid.UUID) (*Assignment, error) {
|
||||||
|
var a Assignment
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT a.id, a.dsb_user_id, a.tenant_id, ct.name, ct.slug,
|
||||||
|
a.status, a.contract_start, a.contract_end,
|
||||||
|
a.monthly_hours_budget, a.notes, a.created_at, a.updated_at
|
||||||
|
FROM dsb_assignments a
|
||||||
|
JOIN compliance_tenants ct ON ct.id = a.tenant_id
|
||||||
|
WHERE a.id = $1
|
||||||
|
`, id).Scan(
|
||||||
|
&a.ID, &a.DSBUserID, &a.TenantID, &a.TenantName, &a.TenantSlug,
|
||||||
|
&a.Status, &a.ContractStart, &a.ContractEnd,
|
||||||
|
&a.MonthlyHoursBudget, &a.Notes, &a.CreatedAt, &a.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("get assignment: %w", err)
|
||||||
|
}
|
||||||
|
return &a, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateAssignment updates an existing assignment.
|
||||||
|
func (s *Store) UpdateAssignment(ctx context.Context, a *Assignment) error {
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE dsb_assignments
|
||||||
|
SET status = $2, contract_end = $3, monthly_hours_budget = $4, notes = $5, updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, a.ID, a.Status, a.ContractEnd, a.MonthlyHoursBudget, a.Notes)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("update assignment: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Hours
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateHourEntry inserts a new time tracking entry.
|
||||||
|
func (s *Store) CreateHourEntry(ctx context.Context, h *HourEntry) error {
|
||||||
|
h.ID = uuid.New()
|
||||||
|
h.CreatedAt = time.Now().UTC()
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO dsb_hours (id, assignment_id, date, hours, category, description, billable, created_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
|
`, h.ID, h.AssignmentID, h.Date, h.Hours, h.Category, h.Description, h.Billable, h.CreatedAt)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("insert hour entry: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListHours returns time entries for an assignment, optionally filtered by month (YYYY-MM).
|
||||||
|
func (s *Store) ListHours(ctx context.Context, assignmentID uuid.UUID, month string) ([]HourEntry, error) {
|
||||||
|
var query string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
if month != "" {
|
||||||
|
query = `
|
||||||
|
SELECT id, assignment_id, date, hours, category, description, billable, created_at
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
|
||||||
|
ORDER BY date DESC, created_at DESC
|
||||||
|
`
|
||||||
|
args = []interface{}{assignmentID, month}
|
||||||
|
} else {
|
||||||
|
query = `
|
||||||
|
SELECT id, assignment_id, date, hours, category, description, billable, created_at
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1
|
||||||
|
ORDER BY date DESC, created_at DESC
|
||||||
|
`
|
||||||
|
args = []interface{}{assignmentID}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query hours: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var entries []HourEntry
|
||||||
|
for rows.Next() {
|
||||||
|
var h HourEntry
|
||||||
|
if err := rows.Scan(
|
||||||
|
&h.ID, &h.AssignmentID, &h.Date, &h.Hours, &h.Category,
|
||||||
|
&h.Description, &h.Billable, &h.CreatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan hour entry: %w", err)
|
||||||
|
}
|
||||||
|
entries = append(entries, h)
|
||||||
|
}
|
||||||
|
|
||||||
|
if entries == nil {
|
||||||
|
entries = []HourEntry{}
|
||||||
|
}
|
||||||
|
return entries, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetHoursSummary returns aggregated hour statistics for an assignment, optionally filtered by month.
|
||||||
|
func (s *Store) GetHoursSummary(ctx context.Context, assignmentID uuid.UUID, month string) (*HoursSummary, error) {
|
||||||
|
summary := &HoursSummary{
|
||||||
|
ByCategory: make(map[string]float64),
|
||||||
|
Period: "all",
|
||||||
|
}
|
||||||
|
|
||||||
|
if month != "" {
|
||||||
|
summary.Period = month
|
||||||
|
}
|
||||||
|
|
||||||
|
// Total and billable hours
|
||||||
|
var totalQuery string
|
||||||
|
var totalArgs []interface{}
|
||||||
|
|
||||||
|
if month != "" {
|
||||||
|
totalQuery = `
|
||||||
|
SELECT COALESCE(SUM(hours), 0), COALESCE(SUM(CASE WHEN billable THEN hours ELSE 0 END), 0)
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
|
||||||
|
`
|
||||||
|
totalArgs = []interface{}{assignmentID, month}
|
||||||
|
} else {
|
||||||
|
totalQuery = `
|
||||||
|
SELECT COALESCE(SUM(hours), 0), COALESCE(SUM(CASE WHEN billable THEN hours ELSE 0 END), 0)
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1
|
||||||
|
`
|
||||||
|
totalArgs = []interface{}{assignmentID}
|
||||||
|
}
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, totalQuery, totalArgs...).Scan(&summary.TotalHours, &summary.BillableHours)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query hours summary totals: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hours by category
|
||||||
|
var catQuery string
|
||||||
|
var catArgs []interface{}
|
||||||
|
|
||||||
|
if month != "" {
|
||||||
|
catQuery = `
|
||||||
|
SELECT category, COALESCE(SUM(hours), 0)
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1 AND to_char(date, 'YYYY-MM') = $2
|
||||||
|
GROUP BY category
|
||||||
|
`
|
||||||
|
catArgs = []interface{}{assignmentID, month}
|
||||||
|
} else {
|
||||||
|
catQuery = `
|
||||||
|
SELECT category, COALESCE(SUM(hours), 0)
|
||||||
|
FROM dsb_hours
|
||||||
|
WHERE assignment_id = $1
|
||||||
|
GROUP BY category
|
||||||
|
`
|
||||||
|
catArgs = []interface{}{assignmentID}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, catQuery, catArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query hours by category: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var cat string
|
||||||
|
var hours float64
|
||||||
|
if err := rows.Scan(&cat, &hours); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan category hours: %w", err)
|
||||||
|
}
|
||||||
|
summary.ByCategory[cat] = hours
|
||||||
|
}
|
||||||
|
|
||||||
|
return summary, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Tasks
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateTask inserts a new DSB task.
|
||||||
|
func (s *Store) CreateTask(ctx context.Context, t *Task) error {
|
||||||
|
t.ID = uuid.New()
|
||||||
|
now := time.Now().UTC()
|
||||||
|
t.CreatedAt = now
|
||||||
|
t.UpdatedAt = now
|
||||||
|
|
||||||
|
if t.Status == "" {
|
||||||
|
t.Status = "open"
|
||||||
|
}
|
||||||
|
if t.Priority == "" {
|
||||||
|
t.Priority = "medium"
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO dsb_tasks (id, assignment_id, title, description, category, priority, status, due_date, created_at, updated_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
|
`, t.ID, t.AssignmentID, t.Title, t.Description, t.Category, t.Priority, t.Status, t.DueDate, t.CreatedAt, t.UpdatedAt)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("insert task: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListTasks returns tasks for an assignment, optionally filtered by status.
|
||||||
|
func (s *Store) ListTasks(ctx context.Context, assignmentID uuid.UUID, status string) ([]Task, error) {
|
||||||
|
var query string
|
||||||
|
var args []interface{}
|
||||||
|
|
||||||
|
if status != "" {
|
||||||
|
query = `
|
||||||
|
SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
|
||||||
|
FROM dsb_tasks
|
||||||
|
WHERE assignment_id = $1 AND status = $2
|
||||||
|
ORDER BY CASE priority
|
||||||
|
WHEN 'urgent' THEN 1
|
||||||
|
WHEN 'high' THEN 2
|
||||||
|
WHEN 'medium' THEN 3
|
||||||
|
WHEN 'low' THEN 4
|
||||||
|
ELSE 5
|
||||||
|
END, due_date ASC NULLS LAST, created_at DESC
|
||||||
|
`
|
||||||
|
args = []interface{}{assignmentID, status}
|
||||||
|
} else {
|
||||||
|
query = `
|
||||||
|
SELECT id, assignment_id, title, description, category, priority, status, due_date, completed_at, created_at, updated_at
|
||||||
|
FROM dsb_tasks
|
||||||
|
WHERE assignment_id = $1
|
||||||
|
ORDER BY CASE priority
|
||||||
|
WHEN 'urgent' THEN 1
|
||||||
|
WHEN 'high' THEN 2
|
||||||
|
WHEN 'medium' THEN 3
|
||||||
|
WHEN 'low' THEN 4
|
||||||
|
ELSE 5
|
||||||
|
END, due_date ASC NULLS LAST, created_at DESC
|
||||||
|
`
|
||||||
|
args = []interface{}{assignmentID}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query tasks: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var tasks []Task
|
||||||
|
for rows.Next() {
|
||||||
|
var t Task
|
||||||
|
if err := rows.Scan(
|
||||||
|
&t.ID, &t.AssignmentID, &t.Title, &t.Description, &t.Category,
|
||||||
|
&t.Priority, &t.Status, &t.DueDate, &t.CompletedAt,
|
||||||
|
&t.CreatedAt, &t.UpdatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan task: %w", err)
|
||||||
|
}
|
||||||
|
tasks = append(tasks, t)
|
||||||
|
}
|
||||||
|
|
||||||
|
if tasks == nil {
|
||||||
|
tasks = []Task{}
|
||||||
|
}
|
||||||
|
return tasks, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateTask updates an existing task.
|
||||||
|
func (s *Store) UpdateTask(ctx context.Context, t *Task) error {
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE dsb_tasks
|
||||||
|
SET title = $2, description = $3, category = $4, priority = $5, status = $6, due_date = $7, updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, t.ID, t.Title, t.Description, t.Category, t.Priority, t.Status, t.DueDate)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("update task: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompleteTask marks a task as completed with the current timestamp.
|
||||||
|
func (s *Store) CompleteTask(ctx context.Context, taskID uuid.UUID) error {
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE dsb_tasks
|
||||||
|
SET status = 'completed', completed_at = NOW(), updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, taskID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("complete task: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Communications
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateCommunication inserts a new communication log entry.
|
||||||
|
func (s *Store) CreateCommunication(ctx context.Context, c *Communication) error {
|
||||||
|
c.ID = uuid.New()
|
||||||
|
c.CreatedAt = time.Now().UTC()
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO dsb_communications (id, assignment_id, direction, channel, subject, content, participants, created_at)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
|
`, c.ID, c.AssignmentID, c.Direction, c.Channel, c.Subject, c.Content, c.Participants, c.CreatedAt)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("insert communication: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListCommunications returns all communication entries for an assignment.
|
||||||
|
func (s *Store) ListCommunications(ctx context.Context, assignmentID uuid.UUID) ([]Communication, error) {
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT id, assignment_id, direction, channel, subject, content, participants, created_at
|
||||||
|
FROM dsb_communications
|
||||||
|
WHERE assignment_id = $1
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
`, assignmentID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query communications: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var comms []Communication
|
||||||
|
for rows.Next() {
|
||||||
|
var c Communication
|
||||||
|
if err := rows.Scan(
|
||||||
|
&c.ID, &c.AssignmentID, &c.Direction, &c.Channel,
|
||||||
|
&c.Subject, &c.Content, &c.Participants, &c.CreatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan communication: %w", err)
|
||||||
|
}
|
||||||
|
comms = append(comms, c)
|
||||||
|
}
|
||||||
|
|
||||||
|
if comms == nil {
|
||||||
|
comms = []Communication{}
|
||||||
|
}
|
||||||
|
return comms, nil
|
||||||
|
}
|
||||||
305
ai-compliance-sdk/internal/incidents/models.go
Normal file
305
ai-compliance-sdk/internal/incidents/models.go
Normal file
@@ -0,0 +1,305 @@
|
|||||||
|
package incidents
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Constants / Enums
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// IncidentCategory represents the category of a security/data breach incident.
type IncidentCategory string

// Known incident categories; "other" serves as the catch-all.
const (
	IncidentCategoryDataBreach         IncidentCategory = "data_breach"
	IncidentCategoryUnauthorizedAccess IncidentCategory = "unauthorized_access"
	IncidentCategoryDataLoss           IncidentCategory = "data_loss"
	IncidentCategorySystemCompromise   IncidentCategory = "system_compromise"
	IncidentCategoryPhishing           IncidentCategory = "phishing"
	IncidentCategoryRansomware         IncidentCategory = "ransomware"
	IncidentCategoryInsiderThreat      IncidentCategory = "insider_threat"
	IncidentCategoryPhysicalBreach     IncidentCategory = "physical_breach"
	IncidentCategoryOther              IncidentCategory = "other"
)

// IncidentStatus represents the status of an incident through its lifecycle.
type IncidentStatus string

// Lifecycle stages, roughly in chronological order from detection to closure.
const (
	IncidentStatusDetected             IncidentStatus = "detected"
	IncidentStatusAssessment           IncidentStatus = "assessment"
	IncidentStatusContainment          IncidentStatus = "containment"
	IncidentStatusNotificationRequired IncidentStatus = "notification_required"
	IncidentStatusNotificationSent     IncidentStatus = "notification_sent"
	IncidentStatusRemediation          IncidentStatus = "remediation"
	IncidentStatusClosed               IncidentStatus = "closed"
)

// IncidentSeverity represents the severity level of an incident.
type IncidentSeverity string

// Severity levels from most to least severe.
const (
	IncidentSeverityCritical IncidentSeverity = "critical"
	IncidentSeverityHigh     IncidentSeverity = "high"
	IncidentSeverityMedium   IncidentSeverity = "medium"
	IncidentSeverityLow      IncidentSeverity = "low"
)

// MeasureType represents the type of corrective measure.
type MeasureType string

const (
	MeasureTypeImmediate MeasureType = "immediate"
	MeasureTypeLongTerm  MeasureType = "long_term"
)

// MeasureStatus represents the status of a corrective measure.
type MeasureStatus string

const (
	MeasureStatusPlanned    MeasureStatus = "planned"
	MeasureStatusInProgress MeasureStatus = "in_progress"
	MeasureStatusCompleted  MeasureStatus = "completed"
)

// NotificationStatus represents the status of a notification
// (supervisory authority or data subject).
type NotificationStatus string

const (
	NotificationStatusNotRequired NotificationStatus = "not_required"
	NotificationStatusPending     NotificationStatus = "pending"
	NotificationStatusSent        NotificationStatus = "sent"
	NotificationStatusConfirmed   NotificationStatus = "confirmed"
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Main Entities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Incident represents a security or data breach incident per DSGVO Art. 33/34.
// Collection-valued fields and the embedded assessment/notification objects
// are persisted as JSONB (per the field comments below).
type Incident struct {
	ID       uuid.UUID `json:"id"`
	TenantID uuid.UUID `json:"tenant_id"`

	// Incident info
	Title       string           `json:"title"`
	Description string           `json:"description,omitempty"`
	Category    IncidentCategory `json:"category"`
	Status      IncidentStatus   `json:"status"`
	Severity    IncidentSeverity `json:"severity"`

	// Detection & reporting
	DetectedAt time.Time `json:"detected_at"`
	ReportedBy uuid.UUID `json:"reported_by"` // user who reported the incident

	// Affected scope
	AffectedDataCategories   []string `json:"affected_data_categories"` // JSONB
	AffectedDataSubjectCount int      `json:"affected_data_subject_count"`
	AffectedSystems          []string `json:"affected_systems"` // JSONB

	// Assessments & notifications (JSONB embedded objects); each is nil
	// until the corresponding workflow step has been performed.
	RiskAssessment          *RiskAssessment          `json:"risk_assessment,omitempty"`
	AuthorityNotification   *AuthorityNotification   `json:"authority_notification,omitempty"`
	DataSubjectNotification *DataSubjectNotification `json:"data_subject_notification,omitempty"`

	// Resolution
	RootCause      string `json:"root_cause,omitempty"`
	LessonsLearned string `json:"lessons_learned,omitempty"`

	// Timeline (JSONB array) of events recorded against the incident.
	Timeline []TimelineEntry `json:"timeline"`

	// Audit
	CreatedAt time.Time  `json:"created_at"`
	UpdatedAt time.Time  `json:"updated_at"`
	ClosedAt  *time.Time `json:"closed_at,omitempty"` // set when the incident is closed
}
|
||||||
|
|
||||||
|
// RiskAssessment contains the risk assessment for an incident.
type RiskAssessment struct {
	Likelihood int       `json:"likelihood"` // 1-5
	Impact     int       `json:"impact"`     // 1-5
	RiskLevel  string    `json:"risk_level"` // critical, high, medium, low (auto-calculated)
	AssessedAt time.Time `json:"assessed_at"`
	AssessedBy uuid.UUID `json:"assessed_by"`
	Notes      string    `json:"notes,omitempty"`
}

// AuthorityNotification tracks the supervisory authority notification per
// DSGVO Art. 33.
type AuthorityNotification struct {
	Status          NotificationStatus `json:"status"`
	Deadline        time.Time          `json:"deadline"` // 72h from detected_at per Art. 33
	SubmittedAt     *time.Time         `json:"submitted_at,omitempty"`
	AuthorityName   string             `json:"authority_name,omitempty"`
	ReferenceNumber string             `json:"reference_number,omitempty"`
	ContactPerson   string             `json:"contact_person,omitempty"`
	Notes           string             `json:"notes,omitempty"`
}

// DataSubjectNotification tracks the data subject notification per
// DSGVO Art. 34.
type DataSubjectNotification struct {
	Required         bool               `json:"required"`
	Status           NotificationStatus `json:"status"`
	SentAt           *time.Time         `json:"sent_at,omitempty"`
	AffectedCount    int                `json:"affected_count"`
	NotificationText string             `json:"notification_text,omitempty"`
	Channel          string             `json:"channel,omitempty"` // email, letter, website
}

// TimelineEntry represents a single event in the incident timeline.
type TimelineEntry struct {
	Timestamp time.Time `json:"timestamp"`
	Action    string    `json:"action"`
	UserID    uuid.UUID `json:"user_id"`
	Details   string    `json:"details,omitempty"`
}

// IncidentMeasure represents a corrective or preventive measure for an incident.
type IncidentMeasure struct {
	ID          uuid.UUID   `json:"id"`
	IncidentID  uuid.UUID   `json:"incident_id"`
	Title       string      `json:"title"`
	Description string      `json:"description,omitempty"`
	MeasureType MeasureType `json:"measure_type"`
	Status      MeasureStatus `json:"status"`
	Responsible string      `json:"responsible,omitempty"`
	DueDate     *time.Time  `json:"due_date,omitempty"`
	CompletedAt *time.Time  `json:"completed_at,omitempty"`
	CreatedAt   time.Time   `json:"created_at"`
}

// IncidentStatistics contains aggregated incident statistics for a tenant.
type IncidentStatistics struct {
	TotalIncidents       int            `json:"total_incidents"`
	OpenIncidents        int            `json:"open_incidents"`
	ByStatus             map[string]int `json:"by_status"`
	BySeverity           map[string]int `json:"by_severity"`
	ByCategory           map[string]int `json:"by_category"`
	NotificationsPending int            `json:"notifications_pending"`
	AvgResolutionHours   float64        `json:"avg_resolution_hours"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateIncidentRequest is the API request for creating an incident
|
||||||
|
type CreateIncidentRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
Category IncidentCategory `json:"category" binding:"required"`
|
||||||
|
Severity IncidentSeverity `json:"severity" binding:"required"`
|
||||||
|
DetectedAt *time.Time `json:"detected_at,omitempty"` // defaults to now
|
||||||
|
AffectedDataCategories []string `json:"affected_data_categories,omitempty"`
|
||||||
|
AffectedDataSubjectCount int `json:"affected_data_subject_count,omitempty"`
|
||||||
|
AffectedSystems []string `json:"affected_systems,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateIncidentRequest is the API request for updating an incident
|
||||||
|
type UpdateIncidentRequest struct {
|
||||||
|
Title string `json:"title,omitempty"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
Category IncidentCategory `json:"category,omitempty"`
|
||||||
|
Status IncidentStatus `json:"status,omitempty"`
|
||||||
|
Severity IncidentSeverity `json:"severity,omitempty"`
|
||||||
|
AffectedDataCategories []string `json:"affected_data_categories,omitempty"`
|
||||||
|
AffectedDataSubjectCount *int `json:"affected_data_subject_count,omitempty"`
|
||||||
|
AffectedSystems []string `json:"affected_systems,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// RiskAssessmentRequest is the API request for assessing risk
|
||||||
|
type RiskAssessmentRequest struct {
|
||||||
|
Likelihood int `json:"likelihood" binding:"required,min=1,max=5"`
|
||||||
|
Impact int `json:"impact" binding:"required,min=1,max=5"`
|
||||||
|
Notes string `json:"notes,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SubmitAuthorityNotificationRequest is the API request for submitting authority notification
|
||||||
|
type SubmitAuthorityNotificationRequest struct {
|
||||||
|
AuthorityName string `json:"authority_name" binding:"required"`
|
||||||
|
ContactPerson string `json:"contact_person,omitempty"`
|
||||||
|
ReferenceNumber string `json:"reference_number,omitempty"`
|
||||||
|
Notes string `json:"notes,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// NotifyDataSubjectsRequest is the API request for notifying data subjects
|
||||||
|
type NotifyDataSubjectsRequest struct {
|
||||||
|
NotificationText string `json:"notification_text" binding:"required"`
|
||||||
|
Channel string `json:"channel" binding:"required"` // email, letter, website
|
||||||
|
AffectedCount int `json:"affected_count,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddMeasureRequest is the API request for adding a corrective measure
|
||||||
|
type AddMeasureRequest struct {
|
||||||
|
Title string `json:"title" binding:"required"`
|
||||||
|
Description string `json:"description,omitempty"`
|
||||||
|
MeasureType MeasureType `json:"measure_type" binding:"required"`
|
||||||
|
Responsible string `json:"responsible,omitempty"`
|
||||||
|
DueDate *time.Time `json:"due_date,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloseIncidentRequest is the API request for closing an incident.
type CloseIncidentRequest struct {
	// RootCause documents why the incident happened (required to close).
	RootCause string `json:"root_cause" binding:"required"`
	// LessonsLearned captures optional takeaways from the incident.
	LessonsLearned string `json:"lessons_learned,omitempty"`
}
|
||||||
|
|
||||||
|
// AddTimelineEntryRequest is the API request for adding a timeline entry
// to an incident's audit trail.
type AddTimelineEntryRequest struct {
	// Action is a short description of what happened (required).
	Action string `json:"action" binding:"required"`
	// Details carries optional additional context for the action.
	Details string `json:"details,omitempty"`
}
|
||||||
|
|
||||||
|
// IncidentListResponse is the API response for listing incidents.
type IncidentListResponse struct {
	// Incidents is the current page of results.
	Incidents []Incident `json:"incidents"`
	// Total is the number of incidents matching the filters, ignoring paging.
	Total int `json:"total"`
}
|
||||||
|
|
||||||
|
// IncidentFilters defines filters for listing incidents. Zero values mean
// "no filter" for the enum fields and "no paging" for Limit/Offset.
type IncidentFilters struct {
	// Status restricts results to a single incident status ("" = any).
	Status IncidentStatus
	// Severity restricts results to a single severity ("" = any).
	Severity IncidentSeverity
	// Category restricts results to a single category ("" = any).
	Category IncidentCategory
	// Limit caps the number of returned rows; <= 0 disables paging.
	Limit int
	// Offset skips this many rows; only applied when Limit > 0.
	Offset int
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Functions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CalculateRiskLevel derives a qualitative risk level from likelihood and
// impact scores. The raw score is likelihood * impact, bucketed as:
//   - critical: score >= 20
//   - high:     score >= 12
//   - medium:   score >= 6
//   - low:      score < 6
func CalculateRiskLevel(likelihood, impact int) string {
	product := likelihood * impact
	if product >= 20 {
		return "critical"
	}
	if product >= 12 {
		return "high"
	}
	if product >= 6 {
		return "medium"
	}
	return "low"
}
|
||||||
|
|
||||||
|
// Calculate72hDeadline calculates the 72-hour notification deadline per DSGVO Art. 33.
|
||||||
|
// The supervisory authority must be notified within 72 hours of becoming aware of a breach.
|
||||||
|
func Calculate72hDeadline(detectedAt time.Time) time.Time {
|
||||||
|
return detectedAt.Add(72 * time.Hour)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsNotificationRequired determines whether authority notification is
// required based on the assessed risk level. Only "critical" and "high"
// risk levels trigger a mandatory notification.
func IsNotificationRequired(riskLevel string) bool {
	switch riskLevel {
	case "critical", "high":
		return true
	default:
		return false
	}
}
|
||||||
571
ai-compliance-sdk/internal/incidents/store.go
Normal file
571
ai-compliance-sdk/internal/incidents/store.go
Normal file
@@ -0,0 +1,571 @@
|
|||||||
|
package incidents
|
||||||
|
|
||||||
|
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgxpool"
)
|
||||||
|
|
||||||
|
// Store handles incident data persistence against PostgreSQL.
type Store struct {
	// pool is the pgx connection pool used for every query in this store.
	pool *pgxpool.Pool
}
|
||||||
|
|
||||||
|
// NewStore creates a new incident store backed by the given connection pool.
func NewStore(pool *pgxpool.Pool) *Store {
	return &Store{pool: pool}
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Incident CRUD Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateIncident creates a new incident
|
||||||
|
func (s *Store) CreateIncident(ctx context.Context, incident *Incident) error {
|
||||||
|
incident.ID = uuid.New()
|
||||||
|
incident.CreatedAt = time.Now().UTC()
|
||||||
|
incident.UpdatedAt = incident.CreatedAt
|
||||||
|
if incident.Status == "" {
|
||||||
|
incident.Status = IncidentStatusDetected
|
||||||
|
}
|
||||||
|
if incident.AffectedDataCategories == nil {
|
||||||
|
incident.AffectedDataCategories = []string{}
|
||||||
|
}
|
||||||
|
if incident.AffectedSystems == nil {
|
||||||
|
incident.AffectedSystems = []string{}
|
||||||
|
}
|
||||||
|
if incident.Timeline == nil {
|
||||||
|
incident.Timeline = []TimelineEntry{}
|
||||||
|
}
|
||||||
|
|
||||||
|
affectedDataCategories, _ := json.Marshal(incident.AffectedDataCategories)
|
||||||
|
affectedSystems, _ := json.Marshal(incident.AffectedSystems)
|
||||||
|
riskAssessment, _ := json.Marshal(incident.RiskAssessment)
|
||||||
|
authorityNotification, _ := json.Marshal(incident.AuthorityNotification)
|
||||||
|
dataSubjectNotification, _ := json.Marshal(incident.DataSubjectNotification)
|
||||||
|
timeline, _ := json.Marshal(incident.Timeline)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
INSERT INTO incident_incidents (
|
||||||
|
id, tenant_id, title, description, category, status, severity,
|
||||||
|
detected_at, reported_by,
|
||||||
|
affected_data_categories, affected_data_subject_count, affected_systems,
|
||||||
|
risk_assessment, authority_notification, data_subject_notification,
|
||||||
|
root_cause, lessons_learned, timeline,
|
||||||
|
created_at, updated_at, closed_at
|
||||||
|
) VALUES (
|
||||||
|
$1, $2, $3, $4, $5, $6, $7,
|
||||||
|
$8, $9,
|
||||||
|
$10, $11, $12,
|
||||||
|
$13, $14, $15,
|
||||||
|
$16, $17, $18,
|
||||||
|
$19, $20, $21
|
||||||
|
)
|
||||||
|
`,
|
||||||
|
incident.ID, incident.TenantID, incident.Title, incident.Description,
|
||||||
|
string(incident.Category), string(incident.Status), string(incident.Severity),
|
||||||
|
incident.DetectedAt, incident.ReportedBy,
|
||||||
|
affectedDataCategories, incident.AffectedDataSubjectCount, affectedSystems,
|
||||||
|
riskAssessment, authorityNotification, dataSubjectNotification,
|
||||||
|
incident.RootCause, incident.LessonsLearned, timeline,
|
||||||
|
incident.CreatedAt, incident.UpdatedAt, incident.ClosedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetIncident retrieves an incident by ID
|
||||||
|
func (s *Store) GetIncident(ctx context.Context, id uuid.UUID) (*Incident, error) {
|
||||||
|
var incident Incident
|
||||||
|
var category, status, severity string
|
||||||
|
var affectedDataCategories, affectedSystems []byte
|
||||||
|
var riskAssessment, authorityNotification, dataSubjectNotification []byte
|
||||||
|
var timeline []byte
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, title, description, category, status, severity,
|
||||||
|
detected_at, reported_by,
|
||||||
|
affected_data_categories, affected_data_subject_count, affected_systems,
|
||||||
|
risk_assessment, authority_notification, data_subject_notification,
|
||||||
|
root_cause, lessons_learned, timeline,
|
||||||
|
created_at, updated_at, closed_at
|
||||||
|
FROM incident_incidents WHERE id = $1
|
||||||
|
`, id).Scan(
|
||||||
|
&incident.ID, &incident.TenantID, &incident.Title, &incident.Description,
|
||||||
|
&category, &status, &severity,
|
||||||
|
&incident.DetectedAt, &incident.ReportedBy,
|
||||||
|
&affectedDataCategories, &incident.AffectedDataSubjectCount, &affectedSystems,
|
||||||
|
&riskAssessment, &authorityNotification, &dataSubjectNotification,
|
||||||
|
&incident.RootCause, &incident.LessonsLearned, &timeline,
|
||||||
|
&incident.CreatedAt, &incident.UpdatedAt, &incident.ClosedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
incident.Category = IncidentCategory(category)
|
||||||
|
incident.Status = IncidentStatus(status)
|
||||||
|
incident.Severity = IncidentSeverity(severity)
|
||||||
|
|
||||||
|
json.Unmarshal(affectedDataCategories, &incident.AffectedDataCategories)
|
||||||
|
json.Unmarshal(affectedSystems, &incident.AffectedSystems)
|
||||||
|
json.Unmarshal(riskAssessment, &incident.RiskAssessment)
|
||||||
|
json.Unmarshal(authorityNotification, &incident.AuthorityNotification)
|
||||||
|
json.Unmarshal(dataSubjectNotification, &incident.DataSubjectNotification)
|
||||||
|
json.Unmarshal(timeline, &incident.Timeline)
|
||||||
|
|
||||||
|
if incident.AffectedDataCategories == nil {
|
||||||
|
incident.AffectedDataCategories = []string{}
|
||||||
|
}
|
||||||
|
if incident.AffectedSystems == nil {
|
||||||
|
incident.AffectedSystems = []string{}
|
||||||
|
}
|
||||||
|
if incident.Timeline == nil {
|
||||||
|
incident.Timeline = []TimelineEntry{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &incident, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListIncidents lists incidents for a tenant with optional filters
|
||||||
|
func (s *Store) ListIncidents(ctx context.Context, tenantID uuid.UUID, filters *IncidentFilters) ([]Incident, int, error) {
|
||||||
|
// Count query
|
||||||
|
countQuery := "SELECT COUNT(*) FROM incident_incidents WHERE tenant_id = $1"
|
||||||
|
countArgs := []interface{}{tenantID}
|
||||||
|
countArgIdx := 2
|
||||||
|
|
||||||
|
if filters != nil {
|
||||||
|
if filters.Status != "" {
|
||||||
|
countQuery += fmt.Sprintf(" AND status = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, string(filters.Status))
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.Severity != "" {
|
||||||
|
countQuery += fmt.Sprintf(" AND severity = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, string(filters.Severity))
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
if filters.Category != "" {
|
||||||
|
countQuery += fmt.Sprintf(" AND category = $%d", countArgIdx)
|
||||||
|
countArgs = append(countArgs, string(filters.Category))
|
||||||
|
countArgIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var total int
|
||||||
|
err := s.pool.QueryRow(ctx, countQuery, countArgs...).Scan(&total)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Data query
|
||||||
|
query := `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, title, description, category, status, severity,
|
||||||
|
detected_at, reported_by,
|
||||||
|
affected_data_categories, affected_data_subject_count, affected_systems,
|
||||||
|
risk_assessment, authority_notification, data_subject_notification,
|
||||||
|
root_cause, lessons_learned, timeline,
|
||||||
|
created_at, updated_at, closed_at
|
||||||
|
FROM incident_incidents WHERE tenant_id = $1`
|
||||||
|
|
||||||
|
args := []interface{}{tenantID}
|
||||||
|
argIdx := 2
|
||||||
|
|
||||||
|
if filters != nil {
|
||||||
|
if filters.Status != "" {
|
||||||
|
query += fmt.Sprintf(" AND status = $%d", argIdx)
|
||||||
|
args = append(args, string(filters.Status))
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filters.Severity != "" {
|
||||||
|
query += fmt.Sprintf(" AND severity = $%d", argIdx)
|
||||||
|
args = append(args, string(filters.Severity))
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filters.Category != "" {
|
||||||
|
query += fmt.Sprintf(" AND category = $%d", argIdx)
|
||||||
|
args = append(args, string(filters.Category))
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
query += " ORDER BY detected_at DESC"
|
||||||
|
|
||||||
|
if filters != nil && filters.Limit > 0 {
|
||||||
|
query += fmt.Sprintf(" LIMIT $%d", argIdx)
|
||||||
|
args = append(args, filters.Limit)
|
||||||
|
argIdx++
|
||||||
|
|
||||||
|
if filters.Offset > 0 {
|
||||||
|
query += fmt.Sprintf(" OFFSET $%d", argIdx)
|
||||||
|
args = append(args, filters.Offset)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.pool.Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var incidents []Incident
|
||||||
|
for rows.Next() {
|
||||||
|
var incident Incident
|
||||||
|
var category, status, severity string
|
||||||
|
var affectedDataCategories, affectedSystems []byte
|
||||||
|
var riskAssessment, authorityNotification, dataSubjectNotification []byte
|
||||||
|
var timeline []byte
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&incident.ID, &incident.TenantID, &incident.Title, &incident.Description,
|
||||||
|
&category, &status, &severity,
|
||||||
|
&incident.DetectedAt, &incident.ReportedBy,
|
||||||
|
&affectedDataCategories, &incident.AffectedDataSubjectCount, &affectedSystems,
|
||||||
|
&riskAssessment, &authorityNotification, &dataSubjectNotification,
|
||||||
|
&incident.RootCause, &incident.LessonsLearned, &timeline,
|
||||||
|
&incident.CreatedAt, &incident.UpdatedAt, &incident.ClosedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
incident.Category = IncidentCategory(category)
|
||||||
|
incident.Status = IncidentStatus(status)
|
||||||
|
incident.Severity = IncidentSeverity(severity)
|
||||||
|
|
||||||
|
json.Unmarshal(affectedDataCategories, &incident.AffectedDataCategories)
|
||||||
|
json.Unmarshal(affectedSystems, &incident.AffectedSystems)
|
||||||
|
json.Unmarshal(riskAssessment, &incident.RiskAssessment)
|
||||||
|
json.Unmarshal(authorityNotification, &incident.AuthorityNotification)
|
||||||
|
json.Unmarshal(dataSubjectNotification, &incident.DataSubjectNotification)
|
||||||
|
json.Unmarshal(timeline, &incident.Timeline)
|
||||||
|
|
||||||
|
if incident.AffectedDataCategories == nil {
|
||||||
|
incident.AffectedDataCategories = []string{}
|
||||||
|
}
|
||||||
|
if incident.AffectedSystems == nil {
|
||||||
|
incident.AffectedSystems = []string{}
|
||||||
|
}
|
||||||
|
if incident.Timeline == nil {
|
||||||
|
incident.Timeline = []TimelineEntry{}
|
||||||
|
}
|
||||||
|
|
||||||
|
incidents = append(incidents, incident)
|
||||||
|
}
|
||||||
|
|
||||||
|
return incidents, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateIncident updates an incident
|
||||||
|
func (s *Store) UpdateIncident(ctx context.Context, incident *Incident) error {
|
||||||
|
incident.UpdatedAt = time.Now().UTC()
|
||||||
|
|
||||||
|
affectedDataCategories, _ := json.Marshal(incident.AffectedDataCategories)
|
||||||
|
affectedSystems, _ := json.Marshal(incident.AffectedSystems)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE incident_incidents SET
|
||||||
|
title = $2, description = $3, category = $4, status = $5, severity = $6,
|
||||||
|
affected_data_categories = $7, affected_data_subject_count = $8, affected_systems = $9,
|
||||||
|
root_cause = $10, lessons_learned = $11,
|
||||||
|
updated_at = $12
|
||||||
|
WHERE id = $1
|
||||||
|
`,
|
||||||
|
incident.ID, incident.Title, incident.Description,
|
||||||
|
string(incident.Category), string(incident.Status), string(incident.Severity),
|
||||||
|
affectedDataCategories, incident.AffectedDataSubjectCount, affectedSystems,
|
||||||
|
incident.RootCause, incident.LessonsLearned,
|
||||||
|
incident.UpdatedAt,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteIncident deletes an incident and its related measures (cascade
// handled by FK). Deleting a non-existent ID is not an error; the DELETE
// simply affects zero rows.
func (s *Store) DeleteIncident(ctx context.Context, id uuid.UUID) error {
	_, err := s.pool.Exec(ctx, "DELETE FROM incident_incidents WHERE id = $1", id)
	return err
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Risk Assessment Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// UpdateRiskAssessment updates the risk assessment for an incident
|
||||||
|
func (s *Store) UpdateRiskAssessment(ctx context.Context, incidentID uuid.UUID, assessment *RiskAssessment) error {
|
||||||
|
assessmentJSON, _ := json.Marshal(assessment)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE incident_incidents SET
|
||||||
|
risk_assessment = $2,
|
||||||
|
updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, incidentID, assessmentJSON)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Notification Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// UpdateAuthorityNotification updates the authority notification for an incident
|
||||||
|
func (s *Store) UpdateAuthorityNotification(ctx context.Context, incidentID uuid.UUID, notification *AuthorityNotification) error {
|
||||||
|
notificationJSON, _ := json.Marshal(notification)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE incident_incidents SET
|
||||||
|
authority_notification = $2,
|
||||||
|
updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, incidentID, notificationJSON)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateDataSubjectNotification updates the data subject notification for an incident
|
||||||
|
func (s *Store) UpdateDataSubjectNotification(ctx context.Context, incidentID uuid.UUID, notification *DataSubjectNotification) error {
|
||||||
|
notificationJSON, _ := json.Marshal(notification)
|
||||||
|
|
||||||
|
_, err := s.pool.Exec(ctx, `
|
||||||
|
UPDATE incident_incidents SET
|
||||||
|
data_subject_notification = $2,
|
||||||
|
updated_at = NOW()
|
||||||
|
WHERE id = $1
|
||||||
|
`, incidentID, notificationJSON)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Measure Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddMeasure adds a corrective measure to an incident.
//
// The measure is mutated in place: a fresh UUID and a UTC creation timestamp
// are assigned, and the status defaults to MeasureStatusPlanned when empty.
func (s *Store) AddMeasure(ctx context.Context, measure *IncidentMeasure) error {
	measure.ID = uuid.New()
	measure.CreatedAt = time.Now().UTC()
	if measure.Status == "" {
		measure.Status = MeasureStatusPlanned
	}

	_, err := s.pool.Exec(ctx, `
		INSERT INTO incident_measures (
			id, incident_id, title, description, measure_type, status,
			responsible, due_date, completed_at, created_at
		) VALUES (
			$1, $2, $3, $4, $5, $6,
			$7, $8, $9, $10
		)
	`,
		measure.ID, measure.IncidentID, measure.Title, measure.Description,
		string(measure.MeasureType), string(measure.Status),
		measure.Responsible, measure.DueDate, measure.CompletedAt, measure.CreatedAt,
	)

	return err
}
|
||||||
|
|
||||||
|
// ListMeasures lists all measures for an incident, oldest first.
// Enum columns (measure_type, status) are stored as text and converted back
// to their typed forms after scanning.
func (s *Store) ListMeasures(ctx context.Context, incidentID uuid.UUID) ([]IncidentMeasure, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT
			id, incident_id, title, description, measure_type, status,
			responsible, due_date, completed_at, created_at
		FROM incident_measures WHERE incident_id = $1
		ORDER BY created_at ASC
	`, incidentID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var measures []IncidentMeasure
	for rows.Next() {
		var m IncidentMeasure
		var measureType, status string

		err := rows.Scan(
			&m.ID, &m.IncidentID, &m.Title, &m.Description,
			&measureType, &status,
			&m.Responsible, &m.DueDate, &m.CompletedAt, &m.CreatedAt,
		)
		if err != nil {
			return nil, err
		}

		m.MeasureType = MeasureType(measureType)
		m.Status = MeasureStatus(status)

		measures = append(measures, m)
	}

	return measures, nil
}
|
||||||
|
|
||||||
|
// UpdateMeasure updates an existing measure identified by measure.ID.
// All editable columns are overwritten from the given struct; the row's
// incident_id and created_at are left untouched.
func (s *Store) UpdateMeasure(ctx context.Context, measure *IncidentMeasure) error {
	_, err := s.pool.Exec(ctx, `
		UPDATE incident_measures SET
			title = $2, description = $3, measure_type = $4, status = $5,
			responsible = $6, due_date = $7, completed_at = $8
		WHERE id = $1
	`,
		measure.ID, measure.Title, measure.Description,
		string(measure.MeasureType), string(measure.Status),
		measure.Responsible, measure.DueDate, measure.CompletedAt,
	)

	return err
}
|
||||||
|
|
||||||
|
// CompleteMeasure marks a measure as completed, setting its status to
// MeasureStatusCompleted and completed_at to the current UTC time.
func (s *Store) CompleteMeasure(ctx context.Context, id uuid.UUID) error {
	now := time.Now().UTC()

	_, err := s.pool.Exec(ctx, `
		UPDATE incident_measures SET
			status = $2,
			completed_at = $3
		WHERE id = $1
	`, id, string(MeasureStatusCompleted), now)

	return err
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Timeline Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddTimelineEntry appends a timeline entry to the incident's JSONB timeline
// array and bumps updated_at server-side.
func (s *Store) AddTimelineEntry(ctx context.Context, incidentID uuid.UUID, entry TimelineEntry) error {
	entryJSON, err := json.Marshal(entry)
	if err != nil {
		return err
	}

	// Use the || operator to append to the JSONB array; COALESCE guards
	// against a NULL timeline column.
	_, err = s.pool.Exec(ctx, `
		UPDATE incident_incidents SET
			timeline = COALESCE(timeline, '[]'::jsonb) || $2::jsonb,
			updated_at = NOW()
		WHERE id = $1
	`, incidentID, string(entryJSON))

	return err
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Close Incident
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CloseIncident closes an incident with root cause and lessons learned.
// Status is set to IncidentStatusClosed; closed_at and updated_at both
// receive the same UTC timestamp ($5 is bound twice).
func (s *Store) CloseIncident(ctx context.Context, id uuid.UUID, rootCause, lessonsLearned string) error {
	now := time.Now().UTC()

	_, err := s.pool.Exec(ctx, `
		UPDATE incident_incidents SET
			status = $2,
			root_cause = $3,
			lessons_learned = $4,
			closed_at = $5,
			updated_at = $5
		WHERE id = $1
	`, id, string(IncidentStatusClosed), rootCause, lessonsLearned, now)

	return err
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns aggregated incident statistics for a tenant
|
||||||
|
func (s *Store) GetStatistics(ctx context.Context, tenantID uuid.UUID) (*IncidentStatistics, error) {
|
||||||
|
stats := &IncidentStatistics{
|
||||||
|
ByStatus: make(map[string]int),
|
||||||
|
BySeverity: make(map[string]int),
|
||||||
|
ByCategory: make(map[string]int),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Total incidents
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
"SELECT COUNT(*) FROM incident_incidents WHERE tenant_id = $1",
|
||||||
|
tenantID).Scan(&stats.TotalIncidents)
|
||||||
|
|
||||||
|
// Open incidents (not closed)
|
||||||
|
s.pool.QueryRow(ctx,
|
||||||
|
"SELECT COUNT(*) FROM incident_incidents WHERE tenant_id = $1 AND status != 'closed'",
|
||||||
|
tenantID).Scan(&stats.OpenIncidents)
|
||||||
|
|
||||||
|
// By status
|
||||||
|
rows, err := s.pool.Query(ctx,
|
||||||
|
"SELECT status, COUNT(*) FROM incident_incidents WHERE tenant_id = $1 GROUP BY status",
|
||||||
|
tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows.Close()
|
||||||
|
for rows.Next() {
|
||||||
|
var status string
|
||||||
|
var count int
|
||||||
|
rows.Scan(&status, &count)
|
||||||
|
stats.ByStatus[status] = count
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// By severity
|
||||||
|
rows, err = s.pool.Query(ctx,
|
||||||
|
"SELECT severity, COUNT(*) FROM incident_incidents WHERE tenant_id = $1 GROUP BY severity",
|
||||||
|
tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows.Close()
|
||||||
|
for rows.Next() {
|
||||||
|
var severity string
|
||||||
|
var count int
|
||||||
|
rows.Scan(&severity, &count)
|
||||||
|
stats.BySeverity[severity] = count
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// By category
|
||||||
|
rows, err = s.pool.Query(ctx,
|
||||||
|
"SELECT category, COUNT(*) FROM incident_incidents WHERE tenant_id = $1 GROUP BY category",
|
||||||
|
tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows.Close()
|
||||||
|
for rows.Next() {
|
||||||
|
var category string
|
||||||
|
var count int
|
||||||
|
rows.Scan(&category, &count)
|
||||||
|
stats.ByCategory[category] = count
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notifications pending
|
||||||
|
s.pool.QueryRow(ctx, `
|
||||||
|
SELECT COUNT(*) FROM incident_incidents
|
||||||
|
WHERE tenant_id = $1
|
||||||
|
AND (authority_notification->>'status' = 'pending'
|
||||||
|
OR data_subject_notification->>'status' = 'pending')
|
||||||
|
`, tenantID).Scan(&stats.NotificationsPending)
|
||||||
|
|
||||||
|
// Average resolution hours (for closed incidents)
|
||||||
|
s.pool.QueryRow(ctx, `
|
||||||
|
SELECT COALESCE(AVG(EXTRACT(EPOCH FROM (closed_at - detected_at)) / 3600), 0)
|
||||||
|
FROM incident_incidents
|
||||||
|
WHERE tenant_id = $1 AND status = 'closed' AND closed_at IS NOT NULL
|
||||||
|
`, tenantID).Scan(&stats.AvgResolutionHours)
|
||||||
|
|
||||||
|
return stats, nil
|
||||||
|
}
|
||||||
65
ai-compliance-sdk/internal/industry/models.go
Normal file
65
ai-compliance-sdk/internal/industry/models.go
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
package industry
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Industry-Specific Compliance Templates (Phase 3.3)
|
||||||
|
// Static reference data — no database migration needed.
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// IndustryTemplate represents a complete compliance package for a specific
// industry: the applicable regulations plus ready-made VVT templates, TOM
// recommendations and risk scenarios.
type IndustryTemplate struct {
	// Slug is the URL-safe identifier used for lookups (e.g. "it-software").
	Slug string `json:"slug"`
	// Name is the human-readable industry name.
	Name string `json:"name"`
	// Description summarizes the compliance package.
	Description string `json:"description"`
	// Icon is a display glyph for UI listings.
	Icon string `json:"icon"`
	// Regulations lists the applicable regulation names (e.g. "DSGVO").
	Regulations []string `json:"regulations"`
	// VVTTemplates are pre-configured processing activity record templates.
	VVTTemplates []VVTTemplate `json:"vvt_templates"`
	// TOMRecommendations are suggested technical/organizational measures.
	TOMRecommendations []TOMRecommendation `json:"tom_recommendations"`
	// RiskScenarios are industry-specific data protection risk scenarios.
	RiskScenarios []RiskScenario `json:"risk_scenarios"`
}
|
||||||
|
|
||||||
|
// VVTTemplate represents a pre-configured processing activity record
// template (Verzeichnis von Verarbeitungstaetigkeiten).
type VVTTemplate struct {
	// Name of the processing activity.
	Name string `json:"name"`
	// Purpose describes why the data is processed.
	Purpose string `json:"purpose"`
	// LegalBasis cites the DSGVO article justifying the processing.
	LegalBasis string `json:"legal_basis"`
	// DataCategories lists the kinds of personal data involved.
	DataCategories []string `json:"data_categories"`
	// DataSubjects lists the groups of people whose data is processed.
	DataSubjects []string `json:"data_subjects"`
	// RetentionPeriod describes how long the data is kept.
	RetentionPeriod string `json:"retention_period"`
}
|
||||||
|
|
||||||
|
// TOMRecommendation represents a recommended technical/organizational
// measure for the industry.
type TOMRecommendation struct {
	// Category groups the measure (e.g. technical vs. organizational).
	Category string `json:"category"`
	// Name is a short title for the measure.
	Name string `json:"name"`
	// Description explains the measure.
	Description string `json:"description"`
	// Priority indicates how urgent the measure is.
	Priority string `json:"priority"`
}
|
||||||
|
|
||||||
|
// RiskScenario represents an industry-specific data protection risk scenario.
type RiskScenario struct {
	// Name is a short title for the scenario.
	Name string `json:"name"`
	// Description explains the risk.
	Description string `json:"description"`
	// Likelihood is a qualitative estimate of occurrence probability.
	Likelihood string `json:"likelihood"`
	// Impact is a qualitative estimate of the potential damage.
	Impact string `json:"impact"`
	// Mitigation describes how to reduce or avoid the risk.
	Mitigation string `json:"mitigation"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// IndustryListResponse is the API response for listing all industries.
type IndustryListResponse struct {
	// Industries are the condensed per-industry summaries.
	Industries []IndustrySummary `json:"industries"`
	// Total is the number of industries returned.
	Total int `json:"total"`
}
|
||||||
|
|
||||||
|
// IndustrySummary is a condensed view of an industry template for list
// endpoints — counts instead of the full nested template data.
type IndustrySummary struct {
	// Slug is the URL-safe identifier used for detail lookups.
	Slug string `json:"slug"`
	// Name is the human-readable industry name.
	Name string `json:"name"`
	// Description summarizes the compliance package.
	Description string `json:"description"`
	// Icon is a display glyph for UI listings.
	Icon string `json:"icon"`
	// RegulationCount is the number of applicable regulations.
	RegulationCount int `json:"regulation_count"`
	// TemplateCount is the number of bundled VVT templates.
	TemplateCount int `json:"template_count"`
}
|
||||||
558
ai-compliance-sdk/internal/industry/templates.go
Normal file
558
ai-compliance-sdk/internal/industry/templates.go
Normal file
@@ -0,0 +1,558 @@
|
|||||||
|
package industry
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Static Industry Template Data
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// allTemplates holds all pre-configured industry compliance packages.
// This is static reference data embedded in the binary — no database
// required. Each entry is built once at package initialization by its
// constructor function.
var allTemplates = []IndustryTemplate{
	itSoftwareTemplate(),
	healthcareTemplate(),
	financeTemplate(),
	manufacturingTemplate(),
}
|
||||||
|
|
||||||
|
// GetAllTemplates returns all available industry templates.
//
// NOTE(review): this returns the package-level backing slice, not a copy —
// callers should treat the result as read-only.
func GetAllTemplates() []IndustryTemplate {
	return allTemplates
}
|
||||||
|
|
||||||
|
// GetTemplateBySlug returns the industry template matching the given slug,
|
||||||
|
// or nil if no match is found.
|
||||||
|
func GetTemplateBySlug(slug string) *IndustryTemplate {
|
||||||
|
for i := range allTemplates {
|
||||||
|
if allTemplates[i].Slug == slug {
|
||||||
|
return &allTemplates[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// IT & Software
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
func itSoftwareTemplate() IndustryTemplate {
|
||||||
|
return IndustryTemplate{
|
||||||
|
Slug: "it-software",
|
||||||
|
Name: "IT & Software",
|
||||||
|
Description: "Compliance-Paket fuer IT-Unternehmen, SaaS-Anbieter und Softwareentwickler mit Fokus auf AI Act, DSGVO fuer Cloud-Dienste und NIS2.",
|
||||||
|
Icon: "\U0001F4BB",
|
||||||
|
Regulations: []string{"DSGVO", "AI Act", "NIS2", "ePrivacy"},
|
||||||
|
|
||||||
|
VVTTemplates: []VVTTemplate{
|
||||||
|
{
|
||||||
|
Name: "SaaS-Kundendaten",
|
||||||
|
Purpose: "Verarbeitung personenbezogener Daten von SaaS-Kunden zur Bereitstellung der vertraglichen Dienstleistung, einschliesslich Account-Verwaltung, Nutzungsanalyse und Abrechnung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung)",
|
||||||
|
DataCategories: []string{"Name", "E-Mail-Adresse", "Unternehmenszugehoerigkeit", "Nutzungsdaten", "Rechnungsdaten", "IP-Adresse"},
|
||||||
|
DataSubjects: []string{"Kunden", "Endnutzer der SaaS-Plattform"},
|
||||||
|
RetentionPeriod: "Vertragsdauer + 10 Jahre (handelsrechtliche Aufbewahrungspflicht)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Cloud-Hosting",
|
||||||
|
Purpose: "Speicherung und Verarbeitung von Kundendaten in Cloud-Infrastruktur (IaaS/PaaS) zur Gewaehrleistung der Verfuegbarkeit und Skalierbarkeit der Dienste.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), Art. 28 DSGVO (Auftragsverarbeitung)",
|
||||||
|
DataCategories: []string{"Alle vom Kunden eingestellten Daten", "Metadaten", "Logdateien", "Zugangsdaten"},
|
||||||
|
DataSubjects: []string{"Kunden", "Endnutzer", "Mitarbeiter der Kunden"},
|
||||||
|
RetentionPeriod: "Vertragsdauer + 30 Tage Backup-Retention",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "KI-Modelltraining",
|
||||||
|
Purpose: "Verwendung von (pseudonymisierten) Daten zum Training, zur Validierung und Verbesserung von KI-/ML-Modellen unter Einhaltung des AI Act.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), ggf. Art. 6 Abs. 1 lit. a (Einwilligung)",
|
||||||
|
DataCategories: []string{"Pseudonymisierte Nutzungsdaten", "Textdaten", "Interaktionsmuster", "Feedback-Daten"},
|
||||||
|
DataSubjects: []string{"Nutzer der KI-Funktionen", "Trainingsdaten-Quellen"},
|
||||||
|
RetentionPeriod: "Bis Modell-Abloesung, max. 5 Jahre; Trainingsdaten nach Pseudonymisierung unbegrenzt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Software-Analytics",
|
||||||
|
Purpose: "Erhebung anonymisierter und pseudonymisierter Nutzungsstatistiken zur Produktverbesserung, Fehleranalyse und Performance-Monitoring.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse)",
|
||||||
|
DataCategories: []string{"Geraetemertkmale", "Browserinformationen", "Nutzungsverhalten", "Crash-Reports", "Performance-Metriken"},
|
||||||
|
DataSubjects: []string{"Endnutzer der Software"},
|
||||||
|
RetentionPeriod: "Rohdaten 90 Tage, aggregierte Daten 2 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Newsletter/Marketing",
|
||||||
|
Purpose: "Versand von Produkt-Newslettern, Release-Benachrichtigungen und Marketing-Kommunikation an registrierte Nutzer und Interessenten.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. a DSGVO (Einwilligung)",
|
||||||
|
DataCategories: []string{"E-Mail-Adresse", "Name", "Unternehmen", "Oeffnungs- und Klickraten", "Abonnement-Praeferenzen"},
|
||||||
|
DataSubjects: []string{"Newsletter-Abonnenten", "Leads", "Bestandskunden"},
|
||||||
|
RetentionPeriod: "Bis Widerruf der Einwilligung + 30 Tage Abwicklung",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Bewerbermanagement",
|
||||||
|
Purpose: "Verarbeitung von Bewerberdaten im Rahmen des Recruiting-Prozesses einschliesslich Sichtung, Kommunikation und Entscheidungsfindung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (vorvertragliche Massnahmen), ss 26 BDSG",
|
||||||
|
DataCategories: []string{"Lebenslauf", "Anschreiben", "Zeugnisse", "Kontaktdaten", "Gehaltsvorstellungen", "Bewertungsnotizen"},
|
||||||
|
DataSubjects: []string{"Bewerber", "Empfehlungsgeber"},
|
||||||
|
RetentionPeriod: "6 Monate nach Abschluss des Verfahrens (AGG-Frist), bei Einwilligung laenger",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
TOMRecommendations: []TOMRecommendation{
|
||||||
|
{
|
||||||
|
Category: "encryption",
|
||||||
|
Name: "Verschluesselung at rest und in transit",
|
||||||
|
Description: "Alle gespeicherten Daten mit AES-256 verschluesseln. Saemtlichen Netzwerkverkehr ueber TLS 1.3 absichern. Zertifikats-Management automatisieren.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "access_control",
|
||||||
|
Name: "Multi-Faktor-Authentifizierung (MFA)",
|
||||||
|
Description: "MFA fuer alle administrativen Zugaenge, Produktionssysteme und CI/CD-Pipelines erzwingen. FIDO2/WebAuthn bevorzugen.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "monitoring",
|
||||||
|
Name: "Penetration Testing",
|
||||||
|
Description: "Regelmaessige externe Penetrationstests (mind. jaehrlich) und kontinuierliche Schwachstellenscans der oeffentlich erreichbaren Infrastruktur durchfuehren.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "development",
|
||||||
|
Name: "Code Reviews und Secure Coding",
|
||||||
|
Description: "Verpflichtende Code-Reviews fuer alle Aenderungen. SAST/DAST-Tools in die CI/CD-Pipeline integrieren. OWASP Top 10 als Mindeststandard.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "supply_chain",
|
||||||
|
Name: "Dependency Scanning",
|
||||||
|
Description: "Automatisiertes Scanning aller Abhaengigkeiten (SBOM) auf bekannte Schwachstellen. Alerts bei kritischen CVEs. Regelmaessige Updates erzwingen.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "incident_response",
|
||||||
|
Name: "Incident Response Plan",
|
||||||
|
Description: "Dokumentierter Incident-Response-Prozess mit definierten Eskalationsstufen, Meldepflichten (72h DSGVO) und regelmaessigen Uebungen (Tabletop Exercises).",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
RiskScenarios: []RiskScenario{
|
||||||
|
{
|
||||||
|
Name: "Datenleck durch Cloud-Fehlkonfiguration",
|
||||||
|
Description: "Oeffentlich zugaengliche S3-Buckets, fehlende Netzwerk-Segmentierung oder falsch konfigurierte Firewalls legen Kundendaten offen.",
|
||||||
|
Likelihood: "high",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Infrastructure-as-Code mit automatisierten Compliance-Checks (z.B. Checkov, tfsec), Cloud Security Posture Management (CSPM) einsetzen, regelmaessige Audits der Cloud-Konfiguration.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Supply-Chain-Angriff",
|
||||||
|
Description: "Kompromittierte Abhaengigkeit (npm, PyPI, Go-Module) schleust Schadcode in den Build-Prozess ein und gelangt in die Produktionsumgebung.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Dependency Pinning, Signaturtruefung, SBOM-Generierung, private Registries, regelmaessige Audits aller Drittanbieter-Komponenten.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "KI-Bias und Diskriminierung",
|
||||||
|
Description: "KI-Modelle produzieren diskriminierende Ergebnisse aufgrund verzerrter Trainingsdaten. Verstoss gegen AI Act und Gleichbehandlungsgrundsaetze.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Bias-Audits vor und nach Deployment, diverse Trainingsdaten, Erklaerbarkeits-Dokumentation gemaess AI Act, menschliche Ueberpruefung (Human-in-the-Loop).",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Insider-Bedrohung",
|
||||||
|
Description: "Ein Mitarbeiter mit privilegiertem Zugang exfiltriert Kundendaten, Quellcode oder Geschaeftsgeheimnisse — absichtlich oder durch Social Engineering.",
|
||||||
|
Likelihood: "low",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Least-Privilege-Prinzip, privilegierte Zugangssteuerung (PAM), Audit-Logging aller Admin-Aktionen, Vier-Augen-Prinzip fuer kritische Operationen, Security-Awareness-Trainings.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Gesundheitswesen
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
func healthcareTemplate() IndustryTemplate {
|
||||||
|
return IndustryTemplate{
|
||||||
|
Slug: "healthcare",
|
||||||
|
Name: "Gesundheitswesen",
|
||||||
|
Description: "Compliance-Paket fuer Arztpraxen, Krankenhaeuser, Labore und Gesundheits-IT mit besonderem Fokus auf Art. 9 DSGVO (besondere Datenkategorien) und Patientendatenschutz.",
|
||||||
|
Icon: "\U0001F3E5",
|
||||||
|
Regulations: []string{"DSGVO", "BDSG \u00a722", "SGB V", "MDR", "DiGAV"},
|
||||||
|
|
||||||
|
VVTTemplates: []VVTTemplate{
|
||||||
|
{
|
||||||
|
Name: "Patientenakte (ePA)",
|
||||||
|
Purpose: "Fuehrung elektronischer Patientenakten zur medizinischen Dokumentation, Behandlungsplanung und abrechnungstechnischen Erfassung.",
|
||||||
|
LegalBasis: "Art. 9 Abs. 2 lit. h DSGVO i.V.m. \u00a722 BDSG, \u00a7630f BGB (Dokumentationspflicht)",
|
||||||
|
DataCategories: []string{"Diagnosen", "Befunde", "Medikation", "Vitalwerte", "Anamnese", "Stammdaten", "Versicherungsdaten"},
|
||||||
|
DataSubjects: []string{"Patienten"},
|
||||||
|
RetentionPeriod: "10 Jahre nach Abschluss der Behandlung (\u00a7630f BGB), bei Strahlentherapie 30 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Terminverwaltung",
|
||||||
|
Purpose: "Planung, Vergabe und Erinnerung von Behandlungsterminen einschliesslich Online-Terminbuchung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), Art. 9 Abs. 2 lit. h DSGVO",
|
||||||
|
DataCategories: []string{"Name", "Kontaktdaten", "Terminzeitpunkt", "Fachrichtung/Behandlungsgrund", "Versicherungsstatus"},
|
||||||
|
DataSubjects: []string{"Patienten", "Angehoerige (bei Terminerstellung fuer Dritte)"},
|
||||||
|
RetentionPeriod: "Vergangene Termine: 1 Jahr, bei medizinischer Relevanz gemaess Patientenakte",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Labor- und Befunddaten",
|
||||||
|
Purpose: "Erfassung, Uebermittlung und Archivierung von Laborergebnissen, bildgebenden Befunden und pathologischen Berichten.",
|
||||||
|
LegalBasis: "Art. 9 Abs. 2 lit. h DSGVO, \u00a710 MBO-Ae",
|
||||||
|
DataCategories: []string{"Laborwerte", "Bildgebung (DICOM)", "Pathologiebefunde", "Mikrobiologische Ergebnisse", "Genetische Daten"},
|
||||||
|
DataSubjects: []string{"Patienten"},
|
||||||
|
RetentionPeriod: "10 Jahre, genetische Daten 30 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Telemedizin",
|
||||||
|
Purpose: "Durchfuehrung von Videosprechstunden und telemedizinischen Konsultationen einschliesslich Uebertragung medizinischer Daten.",
|
||||||
|
LegalBasis: "Art. 9 Abs. 2 lit. h DSGVO, \u00a7630a BGB, Fernbehandlungs-Richtlinien",
|
||||||
|
DataCategories: []string{"Audio-/Videodaten", "Chatprotokolle", "Uebermittelte Dokumente", "Verbindungsmetadaten", "Behandlungsnotizen"},
|
||||||
|
DataSubjects: []string{"Patienten", "Behandelnde Aerzte"},
|
||||||
|
RetentionPeriod: "Aufzeichnungen gemaess Patientenakte (10 Jahre), Verbindungsdaten 90 Tage",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Forschungsdaten",
|
||||||
|
Purpose: "Verwendung pseudonymisierter oder anonymisierter Patientendaten fuer klinische Studien und medizinische Forschung.",
|
||||||
|
LegalBasis: "Art. 9 Abs. 2 lit. j DSGVO, \u00a727 BDSG, ggf. Einwilligung gemaess Art. 9 Abs. 2 lit. a",
|
||||||
|
DataCategories: []string{"Pseudonymisierte Diagnosen", "Behandlungsverlaeufe", "Demografische Daten", "Genetische Daten (anonymisiert)", "Studienergebnisse"},
|
||||||
|
DataSubjects: []string{"Studienteilnehmer", "Patienten (retrospektiv, pseudonymisiert)"},
|
||||||
|
RetentionPeriod: "Studienende + 15 Jahre (GCP-ICH), Forschungsdaten gemaess Foerderrichtlinien",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Abrechnung (KV/Krankenversicherung)",
|
||||||
|
Purpose: "Erstellung und Uebermittlung von Abrechnungsdaten an Kassenaerztliche Vereinigungen und Krankenkassen.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a7284 SGB V, \u00a7295 SGB V",
|
||||||
|
DataCategories: []string{"Versichertennummer", "Diagnose-Codes (ICD-10)", "Leistungsziffern (EBM/GOAe)", "Behandlungsdaten", "Zuzahlungsstatus"},
|
||||||
|
DataSubjects: []string{"Patienten", "Versicherte"},
|
||||||
|
RetentionPeriod: "10 Jahre (steuerrechtlich), Abrechnungsdaten 4 Jahre (\u00a7305 SGB V)",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
TOMRecommendations: []TOMRecommendation{
|
||||||
|
{
|
||||||
|
Category: "encryption",
|
||||||
|
Name: "Ende-zu-Ende-Verschluesselung",
|
||||||
|
Description: "Saemtliche Kommunikation mit Gesundheitsdaten (E-Mail, Telemedizin, Befunduebermittlung) Ende-zu-Ende verschluesseln. Zertifizierte Loesungen gemaess gematik-Spezifikation einsetzen.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "access_control",
|
||||||
|
Name: "Rollenbasierte Zugriffskontrolle (RBAC)",
|
||||||
|
Description: "Feingranulare Zugriffsrechte basierend auf Behandlungskontext: Nur behandelnde Aerzte sehen relevante Patientendaten. Need-to-know-Prinzip konsequent umsetzen.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "monitoring",
|
||||||
|
Name: "Audit-Logging",
|
||||||
|
Description: "Lueckenloses Protokollieren aller Zugriffe auf Patientendaten mit Zeitstempel, Benutzer, Aktion und Begruendung. Logs manipulationssicher speichern (WORM).",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "physical_security",
|
||||||
|
Name: "Physische Sicherheit",
|
||||||
|
Description: "Zutrittskontrolle zu Serverraeumen und medizinischen Arbeitsbereichen. Bildschirmsperren, Clean-Desk-Policy. Sicherer Umgang mit physischen Patientenakten.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "data_minimization",
|
||||||
|
Name: "Pseudonymisierung",
|
||||||
|
Description: "Konsequente Pseudonymisierung bei Datenweitergabe (Forschung, Qualitaetssicherung, Abrechnung). Zuordnungstabellen separat und besonders geschuetzt speichern.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
RiskScenarios: []RiskScenario{
|
||||||
|
{
|
||||||
|
Name: "Unbefugter Zugriff auf Patientendaten",
|
||||||
|
Description: "Mitarbeiter ohne Behandlungsbezug greifen auf Patientenakten zu (z.B. prominente Patienten). Verstoss gegen aerztliche Schweigepflicht und DSGVO.",
|
||||||
|
Likelihood: "high",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Striktes RBAC mit Behandlungskontext-Pruefung, automatische Anomalie-Erkennung bei ungewoehnlichen Zugriffen, regelmaessige Audit-Log-Auswertung, Sanktionskatalog.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Ransomware-Angriff auf Krankenhaus-IT",
|
||||||
|
Description: "Verschluesselungstrojaner legt Krankenhaus-Informationssystem lahm. Patientenversorgung gefaehrdet, Notbetrieb erforderlich.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Netzwerksegmentierung (Medizingeraete, Verwaltung, Gaeste), Offline-Backups, Notfallplaene fuer Papierbetrieb, regelmaessige Sicherheitsupdates, Mitarbeiterschulung gegen Phishing.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Datenverlust bei Systemausfall",
|
||||||
|
Description: "Hardware-Defekt oder Softwarefehler fuehrt zum Verlust aktueller Patientendaten, Befunde oder Medikationsplaene.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Redundante Systeme (Clustering), automatische Backups mit verifizierter Wiederherstellung, unterbrechungsfreie Stromversorgung (USV), Disaster-Recovery-Plan mit RTOs unter 4 Stunden.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Verletzung der aerztlichen Schweigepflicht",
|
||||||
|
Description: "Versehentliche oder vorsaetzliche Weitergabe von Patientendaten an Unberechtigte (z.B. Angehoerige ohne Vollmacht, Arbeitgeber, Medien).",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Schulungen zur Schweigepflicht (\u00a7203 StGB), klare Prozesse fuer Auskunftsersuchen, Dokumentation von Einwilligungen und Vollmachten, sichere Kommunikationskanaele.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Finanzdienstleister
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
func financeTemplate() IndustryTemplate {
|
||||||
|
return IndustryTemplate{
|
||||||
|
Slug: "finance",
|
||||||
|
Name: "Finanzdienstleister",
|
||||||
|
Description: "Compliance-Paket fuer Banken, Versicherungen, Zahlungsdienstleister und FinTechs mit Fokus auf BaFin-Anforderungen, PSD2 und Geldwaeschepraeventions.",
|
||||||
|
Icon: "\U0001F3E6",
|
||||||
|
Regulations: []string{"DSGVO", "KWG", "ZAG", "GwG", "MaRisk", "BAIT/DORA", "PSD2"},
|
||||||
|
|
||||||
|
VVTTemplates: []VVTTemplate{
|
||||||
|
{
|
||||||
|
Name: "Kontoeroeffnung / KYC",
|
||||||
|
Purpose: "Identitaetspruefung und Legitimation von Neukunden im Rahmen der Know-Your-Customer-Pflichten gemaess Geldwaeschegesetz.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a710 GwG, \u00a7154 AO",
|
||||||
|
DataCategories: []string{"Personalausweisdaten", "Adressdaten", "Geburtsdatum", "Staatsangehoerigkeit", "PEP-Status", "Wirtschaftliche Berechtigung", "Video-Identifikation"},
|
||||||
|
DataSubjects: []string{"Neukunden", "Wirtschaftlich Berechtigte", "Vertretungsberechtigte"},
|
||||||
|
RetentionPeriod: "5 Jahre nach Ende der Geschaeftsbeziehung (\u00a78 GwG), Identifizierungsdaten 10 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Zahlungsverarbeitung",
|
||||||
|
Purpose: "Ausfuehrung und Dokumentation von Zahlungstransaktionen (Ueberweisungen, Lastschriften, Kartenzahlungen) im Rahmen der Kontovertragserfullung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), \u00a7675f BGB, PSD2",
|
||||||
|
DataCategories: []string{"IBAN/Kontonummer", "Transaktionsbetrag", "Verwendungszweck", "Empfaengerdaten", "Zeitstempel", "Autorisierungsdaten"},
|
||||||
|
DataSubjects: []string{"Kontoinhaber", "Zahlungsempfaenger", "Zahlungspflichtige"},
|
||||||
|
RetentionPeriod: "10 Jahre (\u00a7257 HGB, \u00a7147 AO)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Kreditpruefung / Scoring",
|
||||||
|
Purpose: "Bonitaetspruefung und Kreditwuerdigkeitsbewertung auf Basis interner und externer Daten zur Kreditentscheidung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (vorvertragliche Massnahmen), \u00a731 BDSG (Scoring)",
|
||||||
|
DataCategories: []string{"Einkommensnachweise", "Schufa-Score", "Beschaeftigungsstatus", "Bestehende Verbindlichkeiten", "Sicherheiten", "Scoring-Ergebnis"},
|
||||||
|
DataSubjects: []string{"Kreditantragsteller", "Buergen", "Mithaftende"},
|
||||||
|
RetentionPeriod: "Kreditlaufzeit + 3 Jahre, bei Ablehnung 6 Monate",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Wertpapierhandel",
|
||||||
|
Purpose: "Ausfuehrung und Dokumentation von Wertpapiergeschaeften, Anlageberatung und Geeignetheitspruefung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO, \u00a763 WpHG (Aufzeichnungspflichten), MiFID II",
|
||||||
|
DataCategories: []string{"Depotdaten", "Orderdaten", "Risikoprofil", "Anlageerfahrung", "Geeignetheitserklaerung", "Telefonaufzeichnungen"},
|
||||||
|
DataSubjects: []string{"Depotinhaber", "Bevollmaechtigte", "Anlageberater"},
|
||||||
|
RetentionPeriod: "10 Jahre (\u00a7257 HGB), Telefonaufzeichnungen 5 Jahre (MiFID II)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Geldwaesche-Monitoring",
|
||||||
|
Purpose: "Kontinuierliche Ueberwachung von Transaktionsmustern zur Erkennung verdaechtiger Aktivitaeten und Erfuellung der Meldepflichten gegenueber der FIU.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), \u00a325h KWG, \u00a756 GwG",
|
||||||
|
DataCategories: []string{"Transaktionshistorie", "Risikobewertung", "Verdachtsmeldungen (SAR)", "PEP-Screening-Ergebnisse", "Sanktionslistenabgleich"},
|
||||||
|
DataSubjects: []string{"Kunden", "Transaktionspartner", "Verdachtspersonen"},
|
||||||
|
RetentionPeriod: "5 Jahre nach Ende der Geschaeftsbeziehung (\u00a78 GwG), Verdachtsmeldungen 10 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Versicherungsantraege",
|
||||||
|
Purpose: "Verarbeitung von Antrags- und Risikodaten zur Pruefung, Annahme und Verwaltung von Versicherungsvertraegen.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), bei Gesundheitsdaten Art. 9 Abs. 2 lit. f DSGVO",
|
||||||
|
DataCategories: []string{"Antragsdaten", "Gesundheitsfragen", "Schadenhistorie", "Risikofaktoren", "Praemienberechnung", "Leistungsansprueche"},
|
||||||
|
DataSubjects: []string{"Versicherungsnehmer", "Versicherte Personen", "Bezugsberechtigte", "Geschaedigte"},
|
||||||
|
RetentionPeriod: "Vertragsdauer + 10 Jahre (Verjaehrung), Lebensversicherung bis Ablauf aller Ansprueche",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
TOMRecommendations: []TOMRecommendation{
|
||||||
|
{
|
||||||
|
Category: "encryption",
|
||||||
|
Name: "HSM fuer Schluesselverwaltung",
|
||||||
|
Description: "Hardware Security Modules (HSM) fuer kryptographische Schluessel, insbesondere bei Zahlungsverkehr und digitalen Signaturen. PCI-DSS-konform.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "monitoring",
|
||||||
|
Name: "Transaktionsmonitoring",
|
||||||
|
Description: "Echtzeit-Ueberwachung aller Finanztransaktionen auf Anomalien, Betrugsversuche und verdaechtige Muster. Regelbasierte und KI-gestuetzte Erkennung.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "access_control",
|
||||||
|
Name: "Vier-Augen-Prinzip",
|
||||||
|
Description: "Kritische Transaktionen (Kreditfreigaben, Grossueberweisungen, Konfigurationsaenderungen) benoetigen Freigabe durch zwei unabhaengige Personen.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "network_security",
|
||||||
|
Name: "DDoS-Schutz",
|
||||||
|
Description: "Mehrstufiger DDoS-Schutz fuer Online-Banking und Zahlungsverkehr-Infrastruktur. Redundante Anbindung, Traffic-Scrubbing, automatische Skalierung.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "business_continuity",
|
||||||
|
Name: "Backup und Disaster Recovery",
|
||||||
|
Description: "Taeglich gesicherte Datenbanken mit geografisch getrennter Aufbewahrung. RTO unter 2 Stunden fuer Kernbanksysteme, RPO unter 15 Minuten.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "testing",
|
||||||
|
Name: "Penetration Testing (TIBER-EU)",
|
||||||
|
Description: "Threat-Intelligence-basierte Red-Teaming-Tests gemaess TIBER-EU-Framework. Jaehrliche Durchfuehrung durch externe, BaFin-akkreditierte Tester.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
RiskScenarios: []RiskScenario{
|
||||||
|
{
|
||||||
|
Name: "Betrug und Identitaetsdiebstahl",
|
||||||
|
Description: "Kriminelle nutzen gestohlene Identitaetsdaten zur Kontoeroeffnung, Kreditaufnahme oder fuer nicht autorisierte Transaktionen.",
|
||||||
|
Likelihood: "high",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Starke Kundenauthentifizierung (SCA) gemaess PSD2, Echtzeit-Betrugs-Scoring, Video-Ident mit Liveness-Detection, biometrische Verifikation, Transaktionslimits.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Insiderhandel-Datenleck",
|
||||||
|
Description: "Vorabinformationen ueber boersenrelevante Entscheidungen (M&A, Quartalsberichte) gelangen an Unberechtigte.",
|
||||||
|
Likelihood: "low",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Insiderverzeichnisse fuehren, Chinese Walls zwischen Abteilungen, Kommunikations-Monitoring, Handelsverbote fuer Insider, regelmaessige Compliance-Schulungen.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Systemausfall bei Zahlungsverkehr",
|
||||||
|
Description: "Ausfall des Kernbanksystems oder der Zahlungsverkehrsinfrastruktur fuehrt zu Nicht-Verfuegbarkeit von Transaktionen, Geldautomaten und Online-Banking.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Hochverfuegbarkeits-Architektur (Active-Active), automatischer Failover, regelmaessige Disaster-Recovery-Tests, Notfall-Kommunikationsplan fuer Kunden und BaFin.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Geldwaesche-Compliance-Verstoss",
|
||||||
|
Description: "Mangelhafte KYC-Prozesse oder unzureichendes Transaktionsmonitoring fuehren zu einem Compliance-Verstoss mit BaFin-Sanktionen.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Automatisiertes Transaction-Monitoring mit regelmaessiger Kalibrierung, jaehrliche GwG-Schulungen, interne Revision der AML-Prozesse, PEP- und Sanktionslisten-Screening in Echtzeit.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Produktion / Industrie
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
func manufacturingTemplate() IndustryTemplate {
|
||||||
|
return IndustryTemplate{
|
||||||
|
Slug: "manufacturing",
|
||||||
|
Name: "Produktion / Industrie",
|
||||||
|
Description: "Compliance-Paket fuer produzierende Unternehmen mit Fokus auf NIS2-Anforderungen, OT-Security, IoT-Sicherheit und Schutz industrieller Steuerungssysteme.",
|
||||||
|
Icon: "\U0001F3ED",
|
||||||
|
Regulations: []string{"DSGVO", "NIS2", "Maschinenverordnung", "BetrSichV", "IT-Sicherheitsgesetz 2.0"},
|
||||||
|
|
||||||
|
VVTTemplates: []VVTTemplate{
|
||||||
|
{
|
||||||
|
Name: "Mitarbeiterdaten / Zeiterfassung",
|
||||||
|
Purpose: "Erfassung von Arbeitszeiten, Schichtplanung und Anwesenheitsdaten zur Lohnabrechnung und Einhaltung des Arbeitszeitgesetzes.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), \u00a726 BDSG, \u00a716 ArbZG",
|
||||||
|
DataCategories: []string{"Mitarbeiterstammdaten", "Arbeitszeitdaten", "Schichtplaene", "Fehlzeiten", "Ueberstunden", "Zutrittsdaten"},
|
||||||
|
DataSubjects: []string{"Mitarbeiter", "Leiharbeiter", "Praktikanten"},
|
||||||
|
RetentionPeriod: "Lohnunterlagen 6 Jahre (\u00a7257 HGB), Arbeitszeitnachweise 2 Jahre (\u00a716 ArbZG)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Lieferantenmanagement",
|
||||||
|
Purpose: "Verwaltung von Lieferantendaten, Bestellprozessen und Qualitaetsbewertungen im Rahmen der Supply-Chain.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. b DSGVO (Vertragserfullung), Art. 6 Abs. 1 lit. f (berechtigtes Interesse)",
|
||||||
|
DataCategories: []string{"Ansprechpartner", "Kontaktdaten", "Lieferkonditionen", "Qualitaetsbewertungen", "Zertifizierungen", "Bankverbindungen"},
|
||||||
|
DataSubjects: []string{"Ansprechpartner der Lieferanten", "Subunternehmer"},
|
||||||
|
RetentionPeriod: "Vertragsdauer + 10 Jahre (Gewaehrleistung und Steuerrecht)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "IoT-Sensordaten",
|
||||||
|
Purpose: "Erfassung und Auswertung von Sensor- und Maschinendaten fuer Produktionsoptimierung, Predictive Maintenance und Qualitaetssicherung.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), bei Personenbezug ggf. Art. 6 Abs. 1 lit. a (Einwilligung)",
|
||||||
|
DataCategories: []string{"Maschinenkennung", "Temperatur/Druck/Vibration", "Produktionszaehler", "Energieverbrauch", "Standortdaten (Intralogistik)", "Bediener-ID (falls zugeordnet)"},
|
||||||
|
DataSubjects: []string{"Maschinenbediener (indirekt)", "Instandhalter"},
|
||||||
|
RetentionPeriod: "Rohdaten 1 Jahr, aggregierte Daten 5 Jahre, qualitaetsrelevant 10 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Qualitaetskontrolle",
|
||||||
|
Purpose: "Dokumentation von Qualitaetspruefungen, Chargenrueckverfolgbarkeit und Reklamationsmanagement.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. c DSGVO (rechtliche Verpflichtung), Maschinenverordnung, Produkthaftung",
|
||||||
|
DataCategories: []string{"Pruefprotokolle", "Chargennnummern", "Messwerte", "Pruefer-ID", "Fotos/Videos der Pruefung", "Reklamationsdaten"},
|
||||||
|
DataSubjects: []string{"Pruefer", "Reklamierende Kunden"},
|
||||||
|
RetentionPeriod: "Produktlebensdauer + 10 Jahre (Produkthaftung), sicherheitskritisch 30 Jahre",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Videoueberwachung",
|
||||||
|
Purpose: "Ueberwachung von Produktionshallen, Lagerbereichen und Aussenbereichen zum Schutz vor Diebstahl, Sabotage und zur Arbeitssicherheit.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), Betriebsvereinbarung",
|
||||||
|
DataCategories: []string{"Videoaufnahmen", "Zeitstempel", "Kamerastandort", "Bewegungserkennung"},
|
||||||
|
DataSubjects: []string{"Mitarbeiter", "Besucher", "Lieferanten", "Unbefugte"},
|
||||||
|
RetentionPeriod: "72 Stunden Standard, bei Vorfaellen bis Abschluss der Ermittlung (max. 10 Tage ohne konkreten Anlass)",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Zugangskontrolle (physisch und logisch)",
|
||||||
|
Purpose: "Steuerung und Protokollierung des Zutritts zu Produktionsbereichen, Gefahrstofflagern und IT-Raeumen mittels Chipkarten/Biometrie.",
|
||||||
|
LegalBasis: "Art. 6 Abs. 1 lit. f DSGVO (berechtigtes Interesse), BetrSichV, bei Biometrie Art. 9 Abs. 2 lit. b DSGVO",
|
||||||
|
DataCategories: []string{"Mitarbeiter-ID", "Zutrittszeitpunkt", "Zutrittsbereich", "Chipkartennummer", "Biometrische Daten (optional)"},
|
||||||
|
DataSubjects: []string{"Mitarbeiter", "Externe Dienstleister", "Besucher"},
|
||||||
|
RetentionPeriod: "Zutrittsprotokolle 90 Tage, sicherheitsrelevante Bereiche 1 Jahr",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
TOMRecommendations: []TOMRecommendation{
|
||||||
|
{
|
||||||
|
Category: "network_security",
|
||||||
|
Name: "Netzwerksegmentierung (IT/OT)",
|
||||||
|
Description: "Strikte Trennung von Office-IT und Operational Technology (OT) durch DMZ, Firewalls und unidirektionale Gateways. Purdue-Modell als Referenzarchitektur.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "patch_management",
|
||||||
|
Name: "IoT-Patch-Management",
|
||||||
|
Description: "Zentrales Management aller IoT-Geraete und Firmware-Versionen. Geplante Wartungsfenster fuer Updates, Risikobewertung vor Patches auf Produktionssystemen.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "physical_security",
|
||||||
|
Name: "Physische Zutrittskontrolle",
|
||||||
|
Description: "Mehrstufiges Zutrittskonzept (Gelaende, Gebaeude, Produktionshalle, Leitstand). Besuchermanagement, Begleitung in Sicherheitsbereichen, Videoprotokollierung.",
|
||||||
|
Priority: "high",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "business_continuity",
|
||||||
|
Name: "Backup industrieller Steuerungen",
|
||||||
|
Description: "Regelmaessige Sicherung von SPS-Programmen, SCADA-Konfigurationen und Roboterprogrammen. Offline-Aufbewahrung der Backups, dokumentierte Restore-Prozeduren.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Category: "incident_response",
|
||||||
|
Name: "Notfallplaene fuer Produktionsausfall",
|
||||||
|
Description: "Dokumentierte Notfallplaene fuer Cyber-Angriffe auf OT-Systeme. Manuelle Rueckfallebenen, Kommunikationsketten, Kontakt zu BSI und CERT. Jaehrliche Uebungen.",
|
||||||
|
Priority: "critical",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
RiskScenarios: []RiskScenario{
|
||||||
|
{
|
||||||
|
Name: "OT-Cyberangriff auf Produktionsanlage",
|
||||||
|
Description: "Angreifer kompromittiert SCADA/SPS-Systeme und manipuliert Produktionsprozesse. Moegliche Folgen: Produktionsausfall, Qualitaetsmaengel, Personengefaehrdung.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "Netzwerksegmentierung (IT/OT), Anomalie-Erkennung im OT-Netzwerk, Haertung der Steuerungssysteme, Deaktivierung nicht benoetigter Dienste und Ports, regelmaessige Sicherheitsaudits.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Ausfall der Lieferkette durch Cybervorfall",
|
||||||
|
Description: "Ein Cyberangriff auf einen kritischen Zulieferer fuehrt zum Stillstand der eigenen Produktion mangels Materialverfuegbarkeit oder kompromittierter Daten.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Diversifikation der Lieferantenbasis, vertragliche Cybersecurity-Anforderungen an Zulieferer, regelmaessige Risikobewertung der Supply Chain, Notfallbestaende fuer kritische Komponenten.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Industriespionage",
|
||||||
|
Description: "Wettbewerber oder staatliche Akteure greifen Konstruktionsdaten, Fertigungsverfahren oder strategische Planungen ab.",
|
||||||
|
Likelihood: "medium",
|
||||||
|
Impact: "critical",
|
||||||
|
Mitigation: "DLP-Loesungen (Data Loss Prevention), Verschluesselung von CAD/CAM-Daten, Geheimhaltungsvereinbarungen, Informationsklassifizierung, USB-Port-Kontrolle, Mitarbeiter-Sensibilisierung.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "IoT-Botnet-Kompromittierung",
|
||||||
|
Description: "Ungepatchte IoT-Sensoren und Aktoren werden Teil eines Botnets und dienen als Angriffsinfrastruktur oder Einfallstor ins Unternehmensnetz.",
|
||||||
|
Likelihood: "high",
|
||||||
|
Impact: "high",
|
||||||
|
Mitigation: "Default-Passwoerter aendern, Firmware-Updates automatisieren, IoT-Geraete in eigenem VLAN isolieren, Netzwerk-Traffic-Monitoring, Geraete-Inventar fuehren, unsichere Geraete ersetzen.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
77
ai-compliance-sdk/internal/multitenant/models.go
Normal file
77
ai-compliance-sdk/internal/multitenant/models.go
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
package multitenant
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TenantOverview provides a consolidated view of a tenant's compliance status
// including scores, module highlights, and namespace information.
// Identity/quota fields mirror the RBAC tenant record; the highlight counters
// are filled in by the store from per-module queries and default to zero when
// a module's data is unavailable.
type TenantOverview struct {
	ID              uuid.UUID `json:"id"`
	Name            string    `json:"name"`
	Slug            string    `json:"slug"`
	Status          string    `json:"status"`
	MaxUsers        int       `json:"max_users"`
	LLMQuotaMonthly int       `json:"llm_quota_monthly"`
	ComplianceScore int       `json:"compliance_score"` // 0-100, taken from the tenant's executive report
	RiskLevel       string    `json:"risk_level"`       // LOW, MEDIUM, HIGH, CRITICAL (report's overall level)
	NamespaceCount  int       `json:"namespace_count"`

	// Module highlights
	OpenIncidents  int     `json:"open_incidents"`
	OpenReports    int     `json:"open_reports"` // whistleblower
	PendingDSRs    int     `json:"pending_dsrs"`
	TrainingRate   float64 `json:"training_completion_rate"` // average enrollment progress, 0-100
	VendorRiskHigh int     `json:"vendor_risk_high"`

	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// MultiTenantOverviewResponse wraps the list of tenant overviews with aggregate metrics.
type MultiTenantOverviewResponse struct {
	Tenants []TenantOverview `json:"tenants"`
	Total   int              `json:"total"`
	// AverageScore is the integer mean of all tenants' compliance scores
	// (0 when there are no tenants).
	AverageScore int       `json:"average_score"`
	GeneratedAt  time.Time `json:"generated_at"`
}

// CreateTenantRequest represents a request to create a new tenant.
// Name and Slug are mandatory (enforced via the gin binding tag); the quota
// fields are optional and default to zero when omitted.
type CreateTenantRequest struct {
	Name            string `json:"name" binding:"required"`
	Slug            string `json:"slug" binding:"required"`
	MaxUsers        int    `json:"max_users"`
	LLMQuotaMonthly int    `json:"llm_quota_monthly"`
}

// UpdateTenantRequest represents a partial update to an existing tenant.
// Pointer fields allow distinguishing between "not provided" and "zero value".
type UpdateTenantRequest struct {
	Name            *string `json:"name"`
	MaxUsers        *int    `json:"max_users"`
	LLMQuotaMonthly *int    `json:"llm_quota_monthly"`
	Status          *string `json:"status"`
}

// CreateNamespaceRequest represents a request to create a new namespace within a tenant.
// IsolationLevel and DataClassification are optional free-form strings here;
// validation of their allowed values happens elsewhere (handler/store layer).
type CreateNamespaceRequest struct {
	Name               string `json:"name" binding:"required"`
	Slug               string `json:"slug" binding:"required"`
	IsolationLevel     string `json:"isolation_level"`
	DataClassification string `json:"data_classification"`
}

// SwitchTenantRequest represents a request to switch the active tenant context.
// TenantID is transported as a string and parsed/validated by the handler.
type SwitchTenantRequest struct {
	TenantID string `json:"tenant_id" binding:"required"`
}

// SwitchTenantResponse contains the tenant info needed for the frontend to switch context.
type SwitchTenantResponse struct {
	TenantID   uuid.UUID `json:"tenant_id"`
	TenantName string    `json:"tenant_name"`
	TenantSlug string    `json:"tenant_slug"`
	Status     string    `json:"status"`
}
|
||||||
148
ai-compliance-sdk/internal/multitenant/store.go
Normal file
148
ai-compliance-sdk/internal/multitenant/store.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package multitenant
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/rbac"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/reporting"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store provides aggregated multi-tenant views by combining data from the
// existing RBAC store, reporting store, and direct SQL queries for module highlights.
type Store struct {
	pool           *pgxpool.Pool    // used for the direct per-module COUNT/AVG queries
	rbacStore      *rbac.Store      // source of tenant records (list/get)
	reportingStore *reporting.Store // source of compliance score and risk level via GenerateReport
}

// NewStore creates a new multi-tenant store.
// All three dependencies are required; no nil checks are performed here.
func NewStore(pool *pgxpool.Pool, rbacStore *rbac.Store, reportingStore *reporting.Store) *Store {
	return &Store{
		pool:           pool,
		rbacStore:      rbacStore,
		reportingStore: reportingStore,
	}
}
|
||||||
|
|
||||||
|
// GetOverview retrieves all tenants with their compliance scores and module highlights.
|
||||||
|
// It aggregates data from the RBAC tenant list, the reporting compliance score,
|
||||||
|
// and direct SQL counts for namespaces, incidents, reports, DSRs, training, and vendors.
|
||||||
|
// Individual query failures are tolerated and result in zero-value defaults.
|
||||||
|
func (s *Store) GetOverview(ctx context.Context) (*MultiTenantOverviewResponse, error) {
|
||||||
|
tenants, err := s.rbacStore.ListTenants(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to list tenants: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
overviews := make([]TenantOverview, 0, len(tenants))
|
||||||
|
totalScore := 0
|
||||||
|
|
||||||
|
for _, tenant := range tenants {
|
||||||
|
overview := s.buildTenantOverview(ctx, tenant)
|
||||||
|
totalScore += overview.ComplianceScore
|
||||||
|
overviews = append(overviews, overview)
|
||||||
|
}
|
||||||
|
|
||||||
|
averageScore := 0
|
||||||
|
if len(overviews) > 0 {
|
||||||
|
averageScore = totalScore / len(overviews)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &MultiTenantOverviewResponse{
|
||||||
|
Tenants: overviews,
|
||||||
|
Total: len(overviews),
|
||||||
|
AverageScore: averageScore,
|
||||||
|
GeneratedAt: time.Now().UTC(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTenantDetail returns detailed compliance info for a specific tenant.
|
||||||
|
func (s *Store) GetTenantDetail(ctx context.Context, tenantID uuid.UUID) (*TenantOverview, error) {
|
||||||
|
tenant, err := s.rbacStore.GetTenant(ctx, tenantID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get tenant: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
overview := s.buildTenantOverview(ctx, tenant)
|
||||||
|
return &overview, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildTenantOverview constructs a TenantOverview by fetching compliance scores
// and module highlights for a single tenant. Errors are logged but do not
// propagate -- missing data defaults to zero values.
func (s *Store) buildTenantOverview(ctx context.Context, tenant *rbac.Tenant) TenantOverview {
	// Base identity/quota fields are copied straight from the RBAC tenant record.
	overview := TenantOverview{
		ID:              tenant.ID,
		Name:            tenant.Name,
		Slug:            tenant.Slug,
		Status:          string(tenant.Status),
		MaxUsers:        tenant.MaxUsers,
		LLMQuotaMonthly: tenant.LLMQuotaMonthly,
		CreatedAt:       tenant.CreatedAt,
		UpdatedAt:       tenant.UpdatedAt,
	}

	// Compliance score and risk level derived from an executive report.
	// GenerateReport computes the compliance score and risk overview internally.
	// On failure, score stays 0 and RiskLevel stays "".
	report, err := s.reportingStore.GenerateReport(ctx, tenant.ID)
	if err != nil {
		log.Printf("multitenant: failed to generate report for tenant %s: %v", tenant.ID, err)
	} else {
		overview.ComplianceScore = report.ComplianceScore
		overview.RiskLevel = report.RiskOverview.OverallLevel
	}

	// The highlight counters below use countSafe/avgSafe, which swallow query
	// errors (e.g. a module's table not existing yet) and return 0.
	// NOTE(review): these queries filter on lowercase status values
	// ('new', 'investigating', ...), while the reporting store compares
	// uppercase keys ("CLOSED", "CRITICAL") -- confirm the schema's canonical
	// casing is consistent across modules.

	// Namespace count
	overview.NamespaceCount = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM compliance_namespaces WHERE tenant_id = $1")

	// Open incidents
	overview.OpenIncidents = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM incidents WHERE tenant_id = $1 AND status IN ('new', 'investigating', 'containment')")

	// Open whistleblower reports
	overview.OpenReports = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1 AND status IN ('new', 'acknowledged', 'investigating')")

	// Pending DSR requests
	overview.PendingDSRs = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM dsr_requests WHERE tenant_id = $1 AND status IN ('new', 'in_progress')")

	// Training completion rate (average progress, 0-100); completed
	// enrollments count as 100 regardless of their stored progress value.
	overview.TrainingRate = s.avgSafe(ctx, tenant.ID,
		"SELECT COALESCE(AVG(CASE WHEN status = 'completed' THEN 100.0 ELSE progress END), 0) FROM academy_enrollments WHERE tenant_id = $1")

	// High-risk vendors
	overview.VendorRiskHigh = s.countSafe(ctx, tenant.ID,
		"SELECT COUNT(*) FROM vendors WHERE tenant_id = $1 AND risk_level = 'high'")

	return overview
}
|
||||||
|
|
||||||
|
// countSafe executes a COUNT(*) query that takes a single tenant_id parameter.
|
||||||
|
// If the query fails for any reason (e.g. table does not exist), it returns 0.
|
||||||
|
func (s *Store) countSafe(ctx context.Context, tenantID uuid.UUID, query string) int {
|
||||||
|
var count int
|
||||||
|
err := s.pool.QueryRow(ctx, query, tenantID).Scan(&count)
|
||||||
|
if err != nil {
|
||||||
|
// Tolerate errors -- table may not exist or query may fail
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
// avgSafe executes an AVG query that takes a single tenant_id parameter.
|
||||||
|
// If the query fails for any reason, it returns 0.
|
||||||
|
func (s *Store) avgSafe(ctx context.Context, tenantID uuid.UUID, query string) float64 {
|
||||||
|
var avg float64
|
||||||
|
err := s.pool.QueryRow(ctx, query, tenantID).Scan(&avg)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return avg
|
||||||
|
}
|
||||||
97
ai-compliance-sdk/internal/reporting/models.go
Normal file
97
ai-compliance-sdk/internal/reporting/models.go
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
package reporting
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
|
// ExecutiveReport is the top-level cross-module compliance report for a single
// tenant, combining per-module summaries with derived risk and deadline data.
type ExecutiveReport struct {
	GeneratedAt     time.Time `json:"generated_at"`
	TenantID        string    `json:"tenant_id"`
	ComplianceScore int       `json:"compliance_score"` // 0-100 overall score

	// Module summaries
	DSGVO         DSGVOSummary         `json:"dsgvo"`
	Vendors       VendorSummary        `json:"vendors"`
	Incidents     IncidentSummary      `json:"incidents"`
	Whistleblower WhistleblowerSummary `json:"whistleblower"`
	Academy       AcademySummary       `json:"academy"`

	// Cross-module metrics
	RiskOverview      RiskOverview    `json:"risk_overview"`
	UpcomingDeadlines []Deadline      `json:"upcoming_deadlines"`
	RecentActivity    []ActivityEntry `json:"recent_activity"`
}

// DSGVOSummary aggregates GDPR/DSGVO module counters: processing activities,
// TOM (technical/organizational measures) progress, and data-subject requests.
type DSGVOSummary struct {
	ProcessingActivities int `json:"processing_activities"`
	ActiveProcessings    int `json:"active_processings"`
	TOMsImplemented      int `json:"toms_implemented"`
	TOMsPlanned          int `json:"toms_planned"`
	TOMsTotal            int `json:"toms_total"`
	CompletionPercent    int `json:"completion_percent"` // TOMsImplemented / total * 100
	OpenDSRs             int `json:"open_dsrs"`
	OverdueDSRs          int `json:"overdue_dsrs"`
	DSFAsCompleted       int `json:"dsfas_completed"`
	RetentionPolicies    int `json:"retention_policies"`
}

// VendorSummary aggregates third-party/vendor risk counters.
type VendorSummary struct {
	TotalVendors     int            `json:"total_vendors"`
	ActiveVendors    int            `json:"active_vendors"`
	ByRiskLevel      map[string]int `json:"by_risk_level"` // keyed by risk level (e.g. "HIGH", "CRITICAL")
	PendingReviews   int            `json:"pending_reviews"`
	ExpiredContracts int            `json:"expired_contracts"`
}

// IncidentSummary aggregates incident/breach counters and resolution speed.
type IncidentSummary struct {
	TotalIncidents       int     `json:"total_incidents"`
	OpenIncidents        int     `json:"open_incidents"`
	CriticalIncidents    int     `json:"critical_incidents"`
	NotificationsPending int     `json:"notifications_pending"`
	AvgResolutionHours   float64 `json:"avg_resolution_hours"`
}

// WhistleblowerSummary aggregates whistleblower-report counters, including
// overdue statutory acknowledgments and feedbacks.
type WhistleblowerSummary struct {
	TotalReports           int     `json:"total_reports"`
	OpenReports            int     `json:"open_reports"`
	OverdueAcknowledgments int     `json:"overdue_acknowledgments"`
	OverdueFeedbacks       int     `json:"overdue_feedbacks"`
	AvgResolutionDays      float64 `json:"avg_resolution_days"`
}

// AcademySummary aggregates training/academy counters.
type AcademySummary struct {
	TotalCourses      int     `json:"total_courses"`
	TotalEnrollments  int     `json:"total_enrollments"`
	CompletionRate    float64 `json:"completion_rate"` // 0-100
	OverdueCount      int     `json:"overdue_count"`
	AvgCompletionDays float64 `json:"avg_completion_days"`
}

// RiskOverview is the cross-module risk rollup derived from the module summaries.
type RiskOverview struct {
	OverallLevel     string       `json:"overall_level"` // LOW, MEDIUM, HIGH, CRITICAL
	ModuleRisks      []ModuleRisk `json:"module_risks"`
	OpenFindings     int          `json:"open_findings"`
	CriticalFindings int          `json:"critical_findings"`
}

// ModuleRisk is the per-module risk entry inside RiskOverview.
type ModuleRisk struct {
	Module string `json:"module"`
	Level  string `json:"level"` // LOW, MEDIUM, HIGH, CRITICAL
	Score  int    `json:"score"` // 0-100
	Issues int    `json:"issues"`
}

// Deadline is a single upcoming (or overdue) obligation surfaced in the report.
type Deadline struct {
	Module      string    `json:"module"`
	Type        string    `json:"type"`
	Description string    `json:"description"`
	DueDate     time.Time `json:"due_date"`
	DaysLeft    int       `json:"days_left"` // negative when already overdue
	Severity    string    `json:"severity"`  // INFO, WARNING, URGENT, OVERDUE
}

// ActivityEntry is a single recent-activity line item in the report.
type ActivityEntry struct {
	Timestamp   time.Time `json:"timestamp"`
	Module      string    `json:"module"`
	Action      string    `json:"action"`
	Description string    `json:"description"`
	UserID      string    `json:"user_id,omitempty"`
}
|
||||||
520
ai-compliance-sdk/internal/reporting/store.go
Normal file
520
ai-compliance-sdk/internal/reporting/store.go
Normal file
@@ -0,0 +1,520 @@
|
|||||||
|
package reporting
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"math"
|
||||||
|
"sort"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/academy"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/dsgvo"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/incidents"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/vendor"
|
||||||
|
"github.com/breakpilot/ai-compliance-sdk/internal/whistleblower"
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store assembles executive reports by aggregating the per-module stores
// (DSGVO, vendors, incidents, whistleblower, academy) plus direct SQL queries
// against the shared connection pool for deadlines and activity.
type Store struct {
	pool          *pgxpool.Pool
	dsgvoStore    *dsgvo.Store
	vendorStore   *vendor.Store
	incidentStore *incidents.Store
	whistleStore  *whistleblower.Store
	academyStore  *academy.Store
}

// NewStore wires the reporting store to the shared pool and the per-module
// stores it aggregates. All dependencies are required.
func NewStore(pool *pgxpool.Pool, ds *dsgvo.Store, vs *vendor.Store, is *incidents.Store, ws *whistleblower.Store, as *academy.Store) *Store {
	return &Store{
		pool:          pool,
		dsgvoStore:    ds,
		vendorStore:   vs,
		incidentStore: is,
		whistleStore:  ws,
		academyStore:  as,
	}
}
|
||||||
|
|
||||||
|
// GenerateReport builds a full ExecutiveReport for the given tenant.
// Each module's stats are gathered best-effort: a failing module store leaves
// its summary as the zero value rather than failing the whole report, so the
// function only ever returns a nil error today.
func (s *Store) GenerateReport(ctx context.Context, tenantID uuid.UUID) (*ExecutiveReport, error) {
	report := &ExecutiveReport{
		GeneratedAt: time.Now().UTC(),
		TenantID:    tenantID.String(),
	}

	// String form of the tenant ID; only the vendor store takes it as a string.
	tid := tenantID.String()

	// 1. Gather DSGVO stats
	dsgvoStats, err := s.dsgvoStore.GetStats(ctx, tenantID)
	if err == nil && dsgvoStats != nil {
		// TOM completion percentage: implemented / (implemented + planned) * 100.
		total := dsgvoStats.TOMsImplemented + dsgvoStats.TOMsPlanned
		pct := 0
		if total > 0 {
			pct = int(math.Round(float64(dsgvoStats.TOMsImplemented) / float64(total) * 100))
		}
		report.DSGVO = DSGVOSummary{
			ProcessingActivities: dsgvoStats.ProcessingActivities,
			ActiveProcessings:    dsgvoStats.ActiveProcessings,
			TOMsImplemented:      dsgvoStats.TOMsImplemented,
			TOMsPlanned:          dsgvoStats.TOMsPlanned,
			TOMsTotal:            total,
			CompletionPercent:    pct,
			OpenDSRs:             dsgvoStats.OpenDSRs,
			OverdueDSRs:          dsgvoStats.OverdueDSRs,
			DSFAsCompleted:       dsgvoStats.DSFAsCompleted,
			RetentionPolicies:    dsgvoStats.RetentionPolicies,
		}
	}

	// 2. Gather vendor stats
	vendorStats, err := s.vendorStore.GetVendorStats(ctx, tid)
	if err == nil && vendorStats != nil {
		active := 0
		if v, ok := vendorStats.ByStatus["ACTIVE"]; ok {
			active = v
		}
		report.Vendors = VendorSummary{
			TotalVendors:     vendorStats.TotalVendors,
			ActiveVendors:    active,
			ByRiskLevel:      vendorStats.ByRiskLevel,
			PendingReviews:   vendorStats.PendingReviews,
			ExpiredContracts: vendorStats.ExpiredContracts,
		}
	}

	// 3. Gather incident stats
	incidentStats, err := s.incidentStore.GetStatistics(ctx, tenantID)
	if err == nil && incidentStats != nil {
		critical := 0
		if v, ok := incidentStats.BySeverity["CRITICAL"]; ok {
			critical = v
		}
		report.Incidents = IncidentSummary{
			TotalIncidents:       incidentStats.TotalIncidents,
			OpenIncidents:        incidentStats.OpenIncidents,
			CriticalIncidents:    critical,
			NotificationsPending: incidentStats.NotificationsPending,
			AvgResolutionHours:   incidentStats.AvgResolutionHours,
		}
	}

	// 4. Gather whistleblower stats
	whistleStats, err := s.whistleStore.GetStatistics(ctx, tenantID)
	if err == nil && whistleStats != nil {
		// "Open" = any status other than the two terminal ones.
		// NOTE(review): assumes uppercase status keys ("CLOSED", "ARCHIVED")
		// -- confirm against the whistleblower store's actual status values.
		openReports := 0
		for status, count := range whistleStats.ByStatus {
			if status != "CLOSED" && status != "ARCHIVED" {
				openReports += count
			}
		}
		report.Whistleblower = WhistleblowerSummary{
			TotalReports:           whistleStats.TotalReports,
			OpenReports:            openReports,
			OverdueAcknowledgments: whistleStats.OverdueAcknowledgments,
			OverdueFeedbacks:       whistleStats.OverdueFeedbacks,
			AvgResolutionDays:      whistleStats.AvgResolutionDays,
		}
	}

	// 5. Gather academy stats
	academyStats, err := s.academyStore.GetStatistics(ctx, tenantID)
	if err == nil && academyStats != nil {
		report.Academy = AcademySummary{
			TotalCourses:      academyStats.TotalCourses,
			TotalEnrollments:  academyStats.TotalEnrollments,
			CompletionRate:    academyStats.CompletionRate,
			OverdueCount:      academyStats.OverdueCount,
			AvgCompletionDays: academyStats.AvgCompletionDays,
		}
	}

	// 6. Calculate risk overview (derived from the summaries gathered above)
	report.RiskOverview = s.calculateRiskOverview(report)

	// 7. Calculate compliance score (0-100)
	report.ComplianceScore = s.calculateComplianceScore(report)

	// 8. Gather upcoming deadlines from DB (best-effort direct SQL)
	report.UpcomingDeadlines = s.getUpcomingDeadlines(ctx, tenantID)

	// 9. Gather recent activity from DB (best-effort direct SQL)
	report.RecentActivity = s.getRecentActivity(ctx, tenantID)

	return report, nil
}
|
||||||
|
|
||||||
|
// calculateRiskOverview derives a per-module risk score (0-100, higher is
// better) from the already-populated module summaries, maps each score to a
// risk level, and rolls them up into an overall level plus finding counts.
func (s *Store) calculateRiskOverview(report *ExecutiveReport) RiskOverview {
	modules := []ModuleRisk{}

	// DSGVO risk based on overdue DSRs and missing TOMs
	dsgvoScore := 100
	dsgvoIssues := report.DSGVO.OverdueDSRs + report.DSGVO.TOMsPlanned
	if report.DSGVO.OverdueDSRs > 0 {
		dsgvoScore -= report.DSGVO.OverdueDSRs * 15
	}
	// NOTE(review): when TOMsTotal > 0 this assignment discards the
	// OverdueDSRs penalty applied above (score is reset to the TOM
	// completion percentage). Looks unintended -- confirm whether overdue
	// DSRs should still reduce the score in that case.
	if report.DSGVO.TOMsTotal > 0 {
		dsgvoScore = int(math.Round(float64(report.DSGVO.CompletionPercent)))
	}
	if dsgvoScore < 0 {
		dsgvoScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "DSGVO", Level: riskLevel(dsgvoScore), Score: dsgvoScore, Issues: dsgvoIssues})

	// Vendor risk based on high-risk vendors and pending reviews
	vendorScore := 100
	vendorIssues := report.Vendors.PendingReviews + report.Vendors.ExpiredContracts
	// HIGH and CRITICAL vendors both count toward the high-risk ratio.
	highRisk := 0
	if v, ok := report.Vendors.ByRiskLevel["HIGH"]; ok {
		highRisk += v
	}
	if v, ok := report.Vendors.ByRiskLevel["CRITICAL"]; ok {
		highRisk += v
	}
	if report.Vendors.TotalVendors > 0 {
		vendorScore = 100 - int(math.Round(float64(highRisk)/float64(report.Vendors.TotalVendors)*100))
	}
	vendorScore -= report.Vendors.PendingReviews * 5
	vendorScore -= report.Vendors.ExpiredContracts * 10
	if vendorScore < 0 {
		vendorScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Vendors", Level: riskLevel(vendorScore), Score: vendorScore, Issues: vendorIssues})

	// Incident risk: flat penalties per critical/open/pending-notification incident.
	incidentScore := 100
	incidentIssues := report.Incidents.OpenIncidents
	incidentScore -= report.Incidents.CriticalIncidents * 20
	incidentScore -= report.Incidents.OpenIncidents * 5
	incidentScore -= report.Incidents.NotificationsPending * 15
	if incidentScore < 0 {
		incidentScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Incidents", Level: riskLevel(incidentScore), Score: incidentScore, Issues: incidentIssues})

	// Whistleblower compliance: penalties for overdue statutory responses.
	whistleScore := 100
	whistleIssues := report.Whistleblower.OverdueAcknowledgments + report.Whistleblower.OverdueFeedbacks
	whistleScore -= report.Whistleblower.OverdueAcknowledgments * 20
	whistleScore -= report.Whistleblower.OverdueFeedbacks * 10
	if whistleScore < 0 {
		whistleScore = 0
	}
	modules = append(modules, ModuleRisk{Module: "Whistleblower", Level: riskLevel(whistleScore), Score: whistleScore, Issues: whistleIssues})

	// Academy compliance: the completion rate itself is the score.
	academyScore := int(math.Round(report.Academy.CompletionRate))
	academyIssues := report.Academy.OverdueCount
	modules = append(modules, ModuleRisk{Module: "Academy", Level: riskLevel(academyScore), Score: academyScore, Issues: academyIssues})

	// Overall score is the average across modules
	totalScore := 0
	for _, m := range modules {
		totalScore += m.Score
	}
	if len(modules) > 0 {
		totalScore = totalScore / len(modules)
	}

	// Findings rollup: all module issues count as open findings; issues in
	// CRITICAL-level modules additionally count as critical findings.
	totalFindings := 0
	criticalFindings := 0
	for _, m := range modules {
		totalFindings += m.Issues
		if m.Level == "CRITICAL" {
			criticalFindings += m.Issues
		}
	}

	return RiskOverview{
		OverallLevel:     riskLevel(totalScore),
		ModuleRisks:      modules,
		OpenFindings:     totalFindings,
		CriticalFindings: criticalFindings,
	}
}
|
||||||
|
|
||||||
|
// riskLevel maps a 0-100 score (higher is better) onto a coarse risk band:
// >= 75 is LOW, >= 50 MEDIUM, >= 25 HIGH, anything below is CRITICAL.
func riskLevel(score int) string {
	if score >= 75 {
		return "LOW"
	}
	if score >= 50 {
		return "MEDIUM"
	}
	if score >= 25 {
		return "HIGH"
	}
	return "CRITICAL"
}
|
||||||
|
|
||||||
|
func (s *Store) calculateComplianceScore(report *ExecutiveReport) int {
|
||||||
|
scores := []int{}
|
||||||
|
weights := []int{}
|
||||||
|
|
||||||
|
// DSGVO: weight 30 (most important)
|
||||||
|
if report.DSGVO.TOMsTotal > 0 {
|
||||||
|
scores = append(scores, report.DSGVO.CompletionPercent)
|
||||||
|
} else {
|
||||||
|
scores = append(scores, 0)
|
||||||
|
}
|
||||||
|
weights = append(weights, 30)
|
||||||
|
|
||||||
|
// Vendor compliance: weight 20
|
||||||
|
vendorScore := 100
|
||||||
|
if report.Vendors.TotalVendors > 0 {
|
||||||
|
vendorScore -= report.Vendors.PendingReviews * 10
|
||||||
|
vendorScore -= report.Vendors.ExpiredContracts * 15
|
||||||
|
}
|
||||||
|
if vendorScore < 0 {
|
||||||
|
vendorScore = 0
|
||||||
|
}
|
||||||
|
scores = append(scores, vendorScore)
|
||||||
|
weights = append(weights, 20)
|
||||||
|
|
||||||
|
// Incident handling: weight 20
|
||||||
|
incidentScore := 100
|
||||||
|
incidentScore -= report.Incidents.OpenIncidents * 10
|
||||||
|
incidentScore -= report.Incidents.NotificationsPending * 20
|
||||||
|
if incidentScore < 0 {
|
||||||
|
incidentScore = 0
|
||||||
|
}
|
||||||
|
scores = append(scores, incidentScore)
|
||||||
|
weights = append(weights, 20)
|
||||||
|
|
||||||
|
// Whistleblower: weight 15
|
||||||
|
whistleScore := 100
|
||||||
|
whistleScore -= report.Whistleblower.OverdueAcknowledgments * 25
|
||||||
|
whistleScore -= report.Whistleblower.OverdueFeedbacks * 15
|
||||||
|
if whistleScore < 0 {
|
||||||
|
whistleScore = 0
|
||||||
|
}
|
||||||
|
scores = append(scores, whistleScore)
|
||||||
|
weights = append(weights, 15)
|
||||||
|
|
||||||
|
// Academy: weight 15
|
||||||
|
academyScore := int(math.Round(report.Academy.CompletionRate))
|
||||||
|
scores = append(scores, academyScore)
|
||||||
|
weights = append(weights, 15)
|
||||||
|
|
||||||
|
totalWeight := 0
|
||||||
|
weightedSum := 0
|
||||||
|
for i, sc := range scores {
|
||||||
|
weightedSum += sc * weights[i]
|
||||||
|
totalWeight += weights[i]
|
||||||
|
}
|
||||||
|
if totalWeight == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return int(math.Round(float64(weightedSum) / float64(totalWeight)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Store) getUpcomingDeadlines(ctx context.Context, tenantID uuid.UUID) []Deadline {
|
||||||
|
deadlines := []Deadline{}
|
||||||
|
now := time.Now().UTC()
|
||||||
|
|
||||||
|
// Vendor reviews due
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT name, next_review_date FROM vendor_vendors
|
||||||
|
WHERE tenant_id = $1 AND next_review_date IS NOT NULL
|
||||||
|
ORDER BY next_review_date ASC LIMIT 10
|
||||||
|
`, tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows.Close()
|
||||||
|
for rows.Next() {
|
||||||
|
var name string
|
||||||
|
var dueDate time.Time
|
||||||
|
if err := rows.Scan(&name, &dueDate); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
daysLeft := int(dueDate.Sub(now).Hours() / 24)
|
||||||
|
severity := "INFO"
|
||||||
|
if daysLeft < 0 {
|
||||||
|
severity = "OVERDUE"
|
||||||
|
} else if daysLeft <= 7 {
|
||||||
|
severity = "URGENT"
|
||||||
|
} else if daysLeft <= 30 {
|
||||||
|
severity = "WARNING"
|
||||||
|
}
|
||||||
|
deadlines = append(deadlines, Deadline{
|
||||||
|
Module: "Vendors",
|
||||||
|
Type: "REVIEW",
|
||||||
|
Description: "Vendor-Review: " + name,
|
||||||
|
DueDate: dueDate,
|
||||||
|
DaysLeft: daysLeft,
|
||||||
|
Severity: severity,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Contract expirations
|
||||||
|
rows2, err := s.pool.Query(ctx, `
|
||||||
|
SELECT vv.name, vc.expiration_date, vc.document_type FROM vendor_contracts vc
|
||||||
|
JOIN vendor_vendors vv ON vc.vendor_id = vv.id
|
||||||
|
WHERE vc.tenant_id = $1 AND vc.expiration_date IS NOT NULL
|
||||||
|
ORDER BY vc.expiration_date ASC LIMIT 10
|
||||||
|
`, tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows2.Close()
|
||||||
|
for rows2.Next() {
|
||||||
|
var name, docType string
|
||||||
|
var dueDate time.Time
|
||||||
|
if err := rows2.Scan(&name, &dueDate, &docType); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
daysLeft := int(dueDate.Sub(now).Hours() / 24)
|
||||||
|
severity := "INFO"
|
||||||
|
if daysLeft < 0 {
|
||||||
|
severity = "OVERDUE"
|
||||||
|
} else if daysLeft <= 14 {
|
||||||
|
severity = "URGENT"
|
||||||
|
} else if daysLeft <= 60 {
|
||||||
|
severity = "WARNING"
|
||||||
|
}
|
||||||
|
deadlines = append(deadlines, Deadline{
|
||||||
|
Module: "Contracts",
|
||||||
|
Type: "EXPIRATION",
|
||||||
|
Description: docType + " läuft ab: " + name,
|
||||||
|
DueDate: dueDate,
|
||||||
|
DaysLeft: daysLeft,
|
||||||
|
Severity: severity,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DSR deadlines (overdue)
|
||||||
|
rows3, err := s.pool.Query(ctx, `
|
||||||
|
SELECT request_type, deadline FROM dsgvo_dsr_requests
|
||||||
|
WHERE tenant_id = $1 AND status NOT IN ('COMPLETED', 'REJECTED')
|
||||||
|
AND deadline IS NOT NULL
|
||||||
|
ORDER BY deadline ASC LIMIT 10
|
||||||
|
`, tenantID)
|
||||||
|
if err == nil {
|
||||||
|
defer rows3.Close()
|
||||||
|
for rows3.Next() {
|
||||||
|
var reqType string
|
||||||
|
var dueDate time.Time
|
||||||
|
if err := rows3.Scan(&reqType, &dueDate); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
daysLeft := int(dueDate.Sub(now).Hours() / 24)
|
||||||
|
severity := "INFO"
|
||||||
|
if daysLeft < 0 {
|
||||||
|
severity = "OVERDUE"
|
||||||
|
} else if daysLeft <= 3 {
|
||||||
|
severity = "URGENT"
|
||||||
|
} else if daysLeft <= 14 {
|
||||||
|
severity = "WARNING"
|
||||||
|
}
|
||||||
|
deadlines = append(deadlines, Deadline{
|
||||||
|
Module: "DSR",
|
||||||
|
Type: "RESPONSE",
|
||||||
|
Description: "Betroffenenrecht: " + reqType,
|
||||||
|
DueDate: dueDate,
|
||||||
|
DaysLeft: daysLeft,
|
||||||
|
Severity: severity,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by due date ascending
|
||||||
|
sort.Slice(deadlines, func(i, j int) bool {
|
||||||
|
return deadlines[i].DueDate.Before(deadlines[j].DueDate)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Limit to top 15
|
||||||
|
if len(deadlines) > 15 {
|
||||||
|
deadlines = deadlines[:15]
|
||||||
|
}
|
||||||
|
|
||||||
|
return deadlines
|
||||||
|
}
|
||||||
|
|
||||||
|
// getRecentActivity assembles a cross-module activity feed for the dashboard:
// vendor create/update events, incidents, and whistleblower reports from the
// last 30 days, merged, sorted most-recent-first, and capped at 20 entries.
//
// Query errors are deliberately swallowed (best-effort feed): a failing
// sub-query simply contributes no entries rather than failing the dashboard.
// NOTE(review): with pgx, query errors are typically surfaced via rows.Err()
// after iteration; that is not checked here, so partial results are silently
// possible — confirm this is acceptable for the dashboard use case.
func (s *Store) getRecentActivity(ctx context.Context, tenantID uuid.UUID) []ActivityEntry {
	activities := []ActivityEntry{}

	// Recent vendors created/updated.
	// UNION ALL combines creation events with genuine updates
	// (updated_at > created_at); "ORDER BY 2" sorts on the timestamp column.
	rows, _ := s.pool.Query(ctx, `
		SELECT name, created_at, 'CREATED' as action FROM vendor_vendors
		WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
		UNION ALL
		SELECT name, updated_at, 'UPDATED' FROM vendor_vendors
		WHERE tenant_id = $1 AND updated_at > created_at AND updated_at > NOW() - INTERVAL '30 days'
		ORDER BY 2 DESC LIMIT 5
	`, tenantID)
	if rows != nil {
		defer rows.Close()
		for rows.Next() {
			var name, action string
			var ts time.Time
			// Skip rows that fail to scan; best-effort feed.
			if err := rows.Scan(&name, &ts, &action); err != nil {
				continue
			}
			// German UI strings: "angelegt" = created, "aktualisiert" = updated.
			desc := "Vendor "
			if action == "CREATED" {
				desc += "angelegt: "
			} else {
				desc += "aktualisiert: "
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Vendors",
				Action:      action,
				Description: desc + name,
			})
		}
	}

	// Recent incidents
	rows2, _ := s.pool.Query(ctx, `
		SELECT title, created_at, severity FROM incidents
		WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
		ORDER BY created_at DESC LIMIT 5
	`, tenantID)
	if rows2 != nil {
		defer rows2.Close()
		for rows2.Next() {
			var title, severity string
			var ts time.Time
			if err := rows2.Scan(&title, &ts, &severity); err != nil {
				continue
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Incidents",
				Action:      "CREATED",
				Description: "Datenpanne (" + severity + "): " + title,
			})
		}
	}

	// Recent whistleblower reports (admin view).
	// Only the category is exposed — no reporter-identifying data.
	rows3, _ := s.pool.Query(ctx, `
		SELECT category, created_at FROM whistleblower_reports
		WHERE tenant_id = $1 AND created_at > NOW() - INTERVAL '30 days'
		ORDER BY created_at DESC LIMIT 5
	`, tenantID)
	if rows3 != nil {
		defer rows3.Close()
		for rows3.Next() {
			var category string
			var ts time.Time
			if err := rows3.Scan(&category, &ts); err != nil {
				continue
			}
			activities = append(activities, ActivityEntry{
				Timestamp:   ts,
				Module:      "Whistleblower",
				Action:      "REPORT",
				Description: "Neue Meldung: " + category,
			})
		}
	}

	// Sort by timestamp descending (most recent first)
	sort.Slice(activities, func(i, j int) bool {
		return activities[i].Timestamp.After(activities[j].Timestamp)
	})

	// Cap the merged feed at 20 entries (each source already limits to 5).
	if len(activities) > 20 {
		activities = activities[:20]
	}

	return activities
}
|
||||||
158
ai-compliance-sdk/internal/sso/models.go
Normal file
158
ai-compliance-sdk/internal/sso/models.go
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
package sso
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Constants / Enums
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ProviderType represents the SSO authentication protocol.
// Stored as a plain string in the provider_type column.
type ProviderType string

const (
	// ProviderTypeOIDC represents OpenID Connect authentication.
	ProviderTypeOIDC ProviderType = "oidc"
	// ProviderTypeSAML represents SAML 2.0 authentication.
	// NOTE(review): only the OIDC fields appear in the create-request type;
	// SAML appears to be declared ahead of implementation — confirm.
	ProviderTypeSAML ProviderType = "saml"
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Main Entities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SSOConfig represents a per-tenant SSO provider configuration supporting
// OIDC and SAML authentication protocols.
//
// SECURITY NOTE(review): OIDCClientSecret carries a json tag, so the raw
// secret is serialized whenever this struct is returned from an API handler.
// Consider `json:"-"` or masking on read paths; confirm against the handler
// layer before changing, since the tag is part of the wire contract.
type SSOConfig struct {
	ID           uuid.UUID    `json:"id" db:"id"`
	TenantID     uuid.UUID    `json:"tenant_id" db:"tenant_id"`
	ProviderType ProviderType `json:"provider_type" db:"provider_type"`
	Name         string       `json:"name" db:"name"`
	Enabled      bool         `json:"enabled" db:"enabled"`

	// OIDC settings
	OIDCIssuerURL    string   `json:"oidc_issuer_url,omitempty" db:"oidc_issuer_url"`
	OIDCClientID     string   `json:"oidc_client_id,omitempty" db:"oidc_client_id"`
	OIDCClientSecret string   `json:"oidc_client_secret,omitempty" db:"oidc_client_secret"`
	OIDCRedirectURI  string   `json:"oidc_redirect_uri,omitempty" db:"oidc_redirect_uri"`
	OIDCScopes       []string `json:"oidc_scopes,omitempty" db:"oidc_scopes"`

	// SAML settings (for future use)
	SAMLEntityID    string `json:"saml_entity_id,omitempty" db:"saml_entity_id"`
	SAMLSSOURL      string `json:"saml_sso_url,omitempty" db:"saml_sso_url"`
	SAMLCertificate string `json:"saml_certificate,omitempty" db:"saml_certificate"`
	// NOTE(review): underscore in SAMLACS_URL violates Go initialism naming
	// (should be SAMLACSURL); renaming would break the store layer, so left as-is.
	SAMLACS_URL string `json:"saml_acs_url,omitempty" db:"saml_acs_url"`

	// Role mapping: maps SSO group/role names to internal role IDs.
	// Persisted as JSONB in the role_mapping column.
	RoleMapping map[string]string `json:"role_mapping" db:"role_mapping"`
	// DefaultRoleID is assigned when no mapping entry matches; nil means none.
	DefaultRoleID *uuid.UUID `json:"default_role_id,omitempty" db:"default_role_id"`
	// AutoProvision enables JIT user creation on first login.
	AutoProvision bool `json:"auto_provision" db:"auto_provision"`

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}
|
||||||
|
|
||||||
|
// SSOUser represents a JIT-provisioned user authenticated via an SSO provider.
// Rows are created/refreshed by Store.UpsertUser on each successful login.
type SSOUser struct {
	ID          uuid.UUID `json:"id" db:"id"`
	TenantID    uuid.UUID `json:"tenant_id" db:"tenant_id"`
	SSOConfigID uuid.UUID `json:"sso_config_id" db:"sso_config_id"`
	// ExternalID is the identity provider's stable subject identifier;
	// unique per (tenant_id, sso_config_id).
	ExternalID  string   `json:"external_id" db:"external_id"`
	Email       string   `json:"email" db:"email"`
	DisplayName string   `json:"display_name" db:"display_name"`
	// Groups as asserted by the IdP; used for role mapping.
	Groups    []string   `json:"groups" db:"groups"`
	LastLogin *time.Time `json:"last_login,omitempty" db:"last_login"`
	IsActive  bool       `json:"is_active" db:"is_active"`

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Request Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateSSOConfigRequest is the API request for creating an SSO configuration.
// Only OIDC fields are accepted here; SAML fields on SSOConfig have no
// counterpart in this request and are created zero-valued.
type CreateSSOConfigRequest struct {
	ProviderType ProviderType `json:"provider_type" binding:"required"`
	Name         string       `json:"name" binding:"required"`
	Enabled      bool         `json:"enabled"`
	OIDCIssuerURL    string   `json:"oidc_issuer_url"`
	OIDCClientID     string   `json:"oidc_client_id"`
	OIDCClientSecret string   `json:"oidc_client_secret"`
	OIDCRedirectURI  string   `json:"oidc_redirect_uri"`
	// OIDCScopes defaults to ["openid", "profile", "email"] when empty
	// (applied in Store.CreateConfig).
	OIDCScopes    []string          `json:"oidc_scopes"`
	RoleMapping   map[string]string `json:"role_mapping"`
	DefaultRoleID *uuid.UUID        `json:"default_role_id"`
	AutoProvision bool              `json:"auto_provision"`
}
|
||||||
|
|
||||||
|
// UpdateSSOConfigRequest is the API request for partially updating an SSO
// configuration. Pointer fields allow distinguishing between "not provided"
// (nil) and "set to zero value". Map/slice fields use nil as "not provided",
// so a caller cannot clear OIDCScopes or RoleMapping to empty via this type.
type UpdateSSOConfigRequest struct {
	Name             *string           `json:"name"`
	Enabled          *bool             `json:"enabled"`
	OIDCIssuerURL    *string           `json:"oidc_issuer_url"`
	OIDCClientID     *string           `json:"oidc_client_id"`
	OIDCClientSecret *string           `json:"oidc_client_secret"`
	OIDCRedirectURI  *string           `json:"oidc_redirect_uri"`
	OIDCScopes       []string          `json:"oidc_scopes"`
	RoleMapping      map[string]string `json:"role_mapping"`
	DefaultRoleID    *uuid.UUID        `json:"default_role_id"`
	AutoProvision    *bool             `json:"auto_provision"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// JWT / Session Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SSOClaims holds the claims embedded in JWT tokens issued after successful
// SSO authentication. These are used for downstream authorization decisions.
type SSOClaims struct {
	UserID      uuid.UUID `json:"user_id"`
	TenantID    uuid.UUID `json:"tenant_id"`
	Email       string    `json:"email"`
	DisplayName string    `json:"display_name"`
	// Roles are the internal role names resolved from the IdP groups via
	// the config's RoleMapping — presumably; confirm against the auth handler.
	Roles       []string  `json:"roles"`
	SSOConfigID uuid.UUID `json:"sso_config_id"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// List / Filter Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// SSOConfigFilters defines filters for listing SSO configurations.
// NOTE(review): the visible store API (ListConfigs) takes no filters;
// these types appear to be declared for future/handler use — confirm.
type SSOConfigFilters struct {
	ProviderType ProviderType
	Enabled      *bool
	Search       string
	Limit        int
	Offset       int
}

// SSOUserFilters defines filters for listing SSO users.
type SSOUserFilters struct {
	SSOConfigID *uuid.UUID
	Email       string
	IsActive    *bool
	Limit       int
	Offset      int
}

// SSOConfigListResponse is the API response for listing SSO configurations.
type SSOConfigListResponse struct {
	Configs []SSOConfig `json:"configs"`
	Total   int         `json:"total"`
}

// SSOUserListResponse is the API response for listing SSO users.
type SSOUserListResponse struct {
	Users []SSOUser `json:"users"`
	Total int       `json:"total"`
}
|
||||||
477
ai-compliance-sdk/internal/sso/store.go
Normal file
477
ai-compliance-sdk/internal/sso/store.go
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
package sso
|
||||||
|
|
||||||
|
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"time"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgxpool"
)
|
||||||
|
|
||||||
|
// Store handles SSO configuration and user data persistence.
// All queries are tenant-scoped via an explicit tenant_id predicate.
type Store struct {
	pool *pgxpool.Pool
}

// NewStore creates a new SSO store backed by the given connection pool.
func NewStore(pool *pgxpool.Pool) *Store {
	return &Store{pool: pool}
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// SSO Configuration CRUD Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateConfig creates a new SSO configuration for a tenant.
//
// Defaults applied before insert: OIDCScopes falls back to
// ["openid", "profile", "email"], and a nil RoleMapping becomes an empty map.
// SAML columns are inserted zero-valued since the request type carries no
// SAML fields. Returns the fully populated config on success.
func (s *Store) CreateConfig(ctx context.Context, tenantID uuid.UUID, req *CreateSSOConfigRequest) (*SSOConfig, error) {
	now := time.Now().UTC()

	cfg := &SSOConfig{
		ID:               uuid.New(),
		TenantID:         tenantID,
		ProviderType:     req.ProviderType,
		Name:             req.Name,
		Enabled:          req.Enabled,
		OIDCIssuerURL:    req.OIDCIssuerURL,
		OIDCClientID:     req.OIDCClientID,
		OIDCClientSecret: req.OIDCClientSecret,
		OIDCRedirectURI:  req.OIDCRedirectURI,
		OIDCScopes:       req.OIDCScopes,
		RoleMapping:      req.RoleMapping,
		DefaultRoleID:    req.DefaultRoleID,
		AutoProvision:    req.AutoProvision,
		CreatedAt:        now,
		UpdatedAt:        now,
	}

	// Apply defaults
	if len(cfg.OIDCScopes) == 0 {
		cfg.OIDCScopes = []string{"openid", "profile", "email"}
	}
	if cfg.RoleMapping == nil {
		cfg.RoleMapping = map[string]string{}
	}

	// RoleMapping is persisted as JSONB, so marshal explicitly.
	roleMappingJSON, err := json.Marshal(cfg.RoleMapping)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal role_mapping: %w", err)
	}

	_, err = s.pool.Exec(ctx, `
		INSERT INTO sso_configurations (
			id, tenant_id, provider_type, name, enabled,
			oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
			saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
			role_mapping, default_role_id, auto_provision,
			created_at, updated_at
		) VALUES (
			$1, $2, $3, $4, $5,
			$6, $7, $8, $9, $10,
			$11, $12, $13, $14,
			$15, $16, $17,
			$18, $19
		)
	`,
		cfg.ID, cfg.TenantID, string(cfg.ProviderType), cfg.Name, cfg.Enabled,
		cfg.OIDCIssuerURL, cfg.OIDCClientID, cfg.OIDCClientSecret, cfg.OIDCRedirectURI, cfg.OIDCScopes,
		cfg.SAMLEntityID, cfg.SAMLSSOURL, cfg.SAMLCertificate, cfg.SAMLACS_URL,
		roleMappingJSON, cfg.DefaultRoleID, cfg.AutoProvision,
		cfg.CreatedAt, cfg.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to insert sso configuration: %w", err)
	}

	return cfg, nil
}
|
||||||
|
|
||||||
|
// GetConfig retrieves an SSO configuration by ID and tenant.
|
||||||
|
func (s *Store) GetConfig(ctx context.Context, tenantID, configID uuid.UUID) (*SSOConfig, error) {
|
||||||
|
var cfg SSOConfig
|
||||||
|
var providerType string
|
||||||
|
var roleMappingJSON []byte
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, provider_type, name, enabled,
|
||||||
|
oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
|
||||||
|
saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
|
||||||
|
role_mapping, default_role_id, auto_provision,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_configurations
|
||||||
|
WHERE id = $1 AND tenant_id = $2
|
||||||
|
`, configID, tenantID).Scan(
|
||||||
|
&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
|
||||||
|
&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
|
||||||
|
&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
|
||||||
|
&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
|
||||||
|
&cfg.CreatedAt, &cfg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get sso configuration: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.ProviderType = ProviderType(providerType)
|
||||||
|
cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
|
||||||
|
|
||||||
|
return &cfg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetConfigByName retrieves an SSO configuration by name and tenant.
|
||||||
|
func (s *Store) GetConfigByName(ctx context.Context, tenantID uuid.UUID, name string) (*SSOConfig, error) {
|
||||||
|
var cfg SSOConfig
|
||||||
|
var providerType string
|
||||||
|
var roleMappingJSON []byte
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, provider_type, name, enabled,
|
||||||
|
oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
|
||||||
|
saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
|
||||||
|
role_mapping, default_role_id, auto_provision,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_configurations
|
||||||
|
WHERE tenant_id = $1 AND name = $2
|
||||||
|
`, tenantID, name).Scan(
|
||||||
|
&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
|
||||||
|
&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
|
||||||
|
&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
|
||||||
|
&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
|
||||||
|
&cfg.CreatedAt, &cfg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get sso configuration by name: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.ProviderType = ProviderType(providerType)
|
||||||
|
cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
|
||||||
|
|
||||||
|
return &cfg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListConfigs lists all SSO configurations for a tenant.
|
||||||
|
func (s *Store) ListConfigs(ctx context.Context, tenantID uuid.UUID) ([]SSOConfig, error) {
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, provider_type, name, enabled,
|
||||||
|
oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
|
||||||
|
saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
|
||||||
|
role_mapping, default_role_id, auto_provision,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_configurations
|
||||||
|
WHERE tenant_id = $1
|
||||||
|
ORDER BY name ASC
|
||||||
|
`, tenantID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to list sso configurations: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var configs []SSOConfig
|
||||||
|
for rows.Next() {
|
||||||
|
cfg, err := scanSSOConfig(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
configs = append(configs, *cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return configs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateConfig updates an existing SSO configuration with partial updates.
// Nil pointer fields (and nil slices/maps) in req leave the stored value
// unchanged. Returns the merged configuration.
//
// NOTE(review): this is a read-modify-write without a transaction or row
// lock, so two concurrent updates can silently lose one side's changes —
// confirm whether that is acceptable for this admin-facing path.
func (s *Store) UpdateConfig(ctx context.Context, tenantID, configID uuid.UUID, req *UpdateSSOConfigRequest) (*SSOConfig, error) {
	cfg, err := s.GetConfig(ctx, tenantID, configID)
	if err != nil {
		return nil, err
	}
	// GetConfig returns (nil, nil) for "not found"; translate to an error here.
	if cfg == nil {
		return nil, fmt.Errorf("sso configuration not found")
	}

	// Apply partial updates
	if req.Name != nil {
		cfg.Name = *req.Name
	}
	if req.Enabled != nil {
		cfg.Enabled = *req.Enabled
	}
	if req.OIDCIssuerURL != nil {
		cfg.OIDCIssuerURL = *req.OIDCIssuerURL
	}
	if req.OIDCClientID != nil {
		cfg.OIDCClientID = *req.OIDCClientID
	}
	if req.OIDCClientSecret != nil {
		cfg.OIDCClientSecret = *req.OIDCClientSecret
	}
	if req.OIDCRedirectURI != nil {
		cfg.OIDCRedirectURI = *req.OIDCRedirectURI
	}
	if req.OIDCScopes != nil {
		cfg.OIDCScopes = req.OIDCScopes
	}
	if req.RoleMapping != nil {
		cfg.RoleMapping = req.RoleMapping
	}
	if req.DefaultRoleID != nil {
		cfg.DefaultRoleID = req.DefaultRoleID
	}
	if req.AutoProvision != nil {
		cfg.AutoProvision = *req.AutoProvision
	}

	cfg.UpdatedAt = time.Now().UTC()

	roleMappingJSON, err := json.Marshal(cfg.RoleMapping)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal role_mapping: %w", err)
	}

	// SAML columns are written back unchanged (they were loaded by GetConfig).
	_, err = s.pool.Exec(ctx, `
		UPDATE sso_configurations SET
			name = $3, enabled = $4,
			oidc_issuer_url = $5, oidc_client_id = $6, oidc_client_secret = $7,
			oidc_redirect_uri = $8, oidc_scopes = $9,
			saml_entity_id = $10, saml_sso_url = $11, saml_certificate = $12, saml_acs_url = $13,
			role_mapping = $14, default_role_id = $15, auto_provision = $16,
			updated_at = $17
		WHERE id = $1 AND tenant_id = $2
	`,
		cfg.ID, cfg.TenantID,
		cfg.Name, cfg.Enabled,
		cfg.OIDCIssuerURL, cfg.OIDCClientID, cfg.OIDCClientSecret,
		cfg.OIDCRedirectURI, cfg.OIDCScopes,
		cfg.SAMLEntityID, cfg.SAMLSSOURL, cfg.SAMLCertificate, cfg.SAMLACS_URL,
		roleMappingJSON, cfg.DefaultRoleID, cfg.AutoProvision,
		cfg.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to update sso configuration: %w", err)
	}

	return cfg, nil
}
|
||||||
|
|
||||||
|
// DeleteConfig deletes an SSO configuration by ID and tenant.
|
||||||
|
func (s *Store) DeleteConfig(ctx context.Context, tenantID, configID uuid.UUID) error {
|
||||||
|
_, err := s.pool.Exec(ctx,
|
||||||
|
"DELETE FROM sso_configurations WHERE id = $1 AND tenant_id = $2",
|
||||||
|
configID, tenantID,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to delete sso configuration: %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetEnabledConfig retrieves the active/enabled SSO configuration for a tenant.
|
||||||
|
func (s *Store) GetEnabledConfig(ctx context.Context, tenantID uuid.UUID) (*SSOConfig, error) {
|
||||||
|
var cfg SSOConfig
|
||||||
|
var providerType string
|
||||||
|
var roleMappingJSON []byte
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, provider_type, name, enabled,
|
||||||
|
oidc_issuer_url, oidc_client_id, oidc_client_secret, oidc_redirect_uri, oidc_scopes,
|
||||||
|
saml_entity_id, saml_sso_url, saml_certificate, saml_acs_url,
|
||||||
|
role_mapping, default_role_id, auto_provision,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_configurations
|
||||||
|
WHERE tenant_id = $1 AND enabled = true
|
||||||
|
LIMIT 1
|
||||||
|
`, tenantID).Scan(
|
||||||
|
&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
|
||||||
|
&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
|
||||||
|
&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
|
||||||
|
&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
|
||||||
|
&cfg.CreatedAt, &cfg.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get enabled sso configuration: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.ProviderType = ProviderType(providerType)
|
||||||
|
cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)
|
||||||
|
|
||||||
|
return &cfg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// SSO User Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// UpsertUser inserts or updates an SSO user via JIT provisioning.
// On conflict (tenant_id, sso_config_id, external_id), the user's email,
// display name, groups, and last login timestamp are updated, and the
// account is reactivated (is_active = true). The persisted row is returned
// via RETURNING, so the caller always sees the stored state (e.g. the
// original created_at on an update).
func (s *Store) UpsertUser(ctx context.Context, tenantID, ssoConfigID uuid.UUID, externalID, email, displayName string, groups []string) (*SSOUser, error) {
	now := time.Now().UTC()
	// Fresh ID is only used on insert; on conflict the existing id wins.
	id := uuid.New()

	var user SSOUser
	// $8 (now) is deliberately reused for last_login, created_at and
	// updated_at so all three timestamps agree on first insert.
	err := s.pool.QueryRow(ctx, `
		INSERT INTO sso_users (
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
		) VALUES (
			$1, $2, $3,
			$4, $5, $6, $7,
			$8, true,
			$8, $8
		)
		ON CONFLICT (tenant_id, sso_config_id, external_id) DO UPDATE SET
			email = EXCLUDED.email,
			display_name = EXCLUDED.display_name,
			groups = EXCLUDED.groups,
			last_login = EXCLUDED.last_login,
			is_active = true,
			updated_at = EXCLUDED.updated_at
		RETURNING
			id, tenant_id, sso_config_id,
			external_id, email, display_name, groups,
			last_login, is_active,
			created_at, updated_at
	`,
		id, tenantID, ssoConfigID,
		externalID, email, displayName, groups,
		now,
	).Scan(
		&user.ID, &user.TenantID, &user.SSOConfigID,
		&user.ExternalID, &user.Email, &user.DisplayName, &user.Groups,
		&user.LastLogin, &user.IsActive,
		&user.CreatedAt, &user.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to upsert sso user: %w", err)
	}

	return &user, nil
}
|
||||||
|
|
||||||
|
// GetUserByExternalID looks up an SSO user by their external identity provider ID.
|
||||||
|
func (s *Store) GetUserByExternalID(ctx context.Context, tenantID, ssoConfigID uuid.UUID, externalID string) (*SSOUser, error) {
|
||||||
|
var user SSOUser
|
||||||
|
|
||||||
|
err := s.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, sso_config_id,
|
||||||
|
external_id, email, display_name, groups,
|
||||||
|
last_login, is_active,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_users
|
||||||
|
WHERE tenant_id = $1 AND sso_config_id = $2 AND external_id = $3
|
||||||
|
`, tenantID, ssoConfigID, externalID).Scan(
|
||||||
|
&user.ID, &user.TenantID, &user.SSOConfigID,
|
||||||
|
&user.ExternalID, &user.Email, &user.DisplayName, &user.Groups,
|
||||||
|
&user.LastLogin, &user.IsActive,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get sso user by external id: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListUsers lists all SSO-provisioned users for a tenant.
|
||||||
|
func (s *Store) ListUsers(ctx context.Context, tenantID uuid.UUID) ([]SSOUser, error) {
|
||||||
|
rows, err := s.pool.Query(ctx, `
|
||||||
|
SELECT
|
||||||
|
id, tenant_id, sso_config_id,
|
||||||
|
external_id, email, display_name, groups,
|
||||||
|
last_login, is_active,
|
||||||
|
created_at, updated_at
|
||||||
|
FROM sso_users
|
||||||
|
WHERE tenant_id = $1
|
||||||
|
ORDER BY display_name ASC
|
||||||
|
`, tenantID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to list sso users: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var users []SSOUser
|
||||||
|
for rows.Next() {
|
||||||
|
user, err := scanSSOUser(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
users = append(users, *user)
|
||||||
|
}
|
||||||
|
|
||||||
|
return users, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Row Scanning Helpers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// scanSSOConfig scans an SSO configuration row from pgx.Rows.
// The column order must match the SELECT lists in ListConfigs/GetConfig.
// provider_type and role_mapping are scanned into temporaries and converted
// after the scan (string -> ProviderType, JSONB bytes -> map).
func scanSSOConfig(rows pgx.Rows) (*SSOConfig, error) {
	var cfg SSOConfig
	var providerType string
	var roleMappingJSON []byte

	err := rows.Scan(
		&cfg.ID, &cfg.TenantID, &providerType, &cfg.Name, &cfg.Enabled,
		&cfg.OIDCIssuerURL, &cfg.OIDCClientID, &cfg.OIDCClientSecret, &cfg.OIDCRedirectURI, &cfg.OIDCScopes,
		&cfg.SAMLEntityID, &cfg.SAMLSSOURL, &cfg.SAMLCertificate, &cfg.SAMLACS_URL,
		&roleMappingJSON, &cfg.DefaultRoleID, &cfg.AutoProvision,
		&cfg.CreatedAt, &cfg.UpdatedAt,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to scan sso configuration: %w", err)
	}

	cfg.ProviderType = ProviderType(providerType)
	cfg.RoleMapping = unmarshalRoleMapping(roleMappingJSON)

	return &cfg, nil
}
|
||||||
|
|
||||||
|
// scanSSOUser scans an SSO user row from pgx.Rows.
|
||||||
|
func scanSSOUser(rows pgx.Rows) (*SSOUser, error) {
|
||||||
|
var user SSOUser
|
||||||
|
|
||||||
|
err := rows.Scan(
|
||||||
|
&user.ID, &user.TenantID, &user.SSOConfigID,
|
||||||
|
&user.ExternalID, &user.Email, &user.DisplayName, &user.Groups,
|
||||||
|
&user.LastLogin, &user.IsActive,
|
||||||
|
&user.CreatedAt, &user.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to scan sso user: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &user, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// unmarshalRoleMapping safely unmarshals JSONB role_mapping bytes into a map.
// A nil input or malformed JSON yields a non-nil empty map; JSON "null"
// decodes to a nil map (json.Unmarshal leaves the target untouched for null).
func unmarshalRoleMapping(data []byte) map[string]string {
	empty := map[string]string{}
	if data == nil {
		return empty
	}
	var mapping map[string]string
	if json.Unmarshal(data, &mapping) != nil {
		return empty
	}
	return mapping
}
|
||||||
488
ai-compliance-sdk/internal/vendor/models.go
vendored
Normal file
488
ai-compliance-sdk/internal/vendor/models.go
vendored
Normal file
@@ -0,0 +1,488 @@
|
|||||||
|
package vendor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Constants / Enums
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// VendorRole represents the GDPR role of a vendor in data processing
type VendorRole string

const (
	VendorRoleProcessor       VendorRole = "PROCESSOR"
	VendorRoleController      VendorRole = "CONTROLLER"
	VendorRoleJointController VendorRole = "JOINT_CONTROLLER"
	VendorRoleSubProcessor    VendorRole = "SUB_PROCESSOR"
	VendorRoleThirdParty      VendorRole = "THIRD_PARTY"
)

// VendorStatus represents the lifecycle status of a vendor
type VendorStatus string

const (
	VendorStatusActive        VendorStatus = "ACTIVE"
	VendorStatusInactive      VendorStatus = "INACTIVE"
	VendorStatusPendingReview VendorStatus = "PENDING_REVIEW"
	VendorStatusTerminated    VendorStatus = "TERMINATED"
)

// DocumentType represents the type of a contract/compliance document.
// NOTE(review): abbreviations are presumably the German/GDPR contract terms
// (AVV = data-processing agreement, SCC = standard contractual clauses,
// TOM = technical and organizational measures) — confirm with the handlers.
type DocumentType string

const (
	DocumentTypeAVV              DocumentType = "AVV"
	DocumentTypeMSA              DocumentType = "MSA"
	DocumentTypeSLA              DocumentType = "SLA"
	DocumentTypeSCC              DocumentType = "SCC"
	DocumentTypeNDA              DocumentType = "NDA"
	DocumentTypeTOMAnnex         DocumentType = "TOM_ANNEX"
	DocumentTypeCertification    DocumentType = "CERTIFICATION"
	DocumentTypeSubProcessorList DocumentType = "SUB_PROCESSOR_LIST"
)

// FindingType represents the type of a compliance finding
type FindingType string

const (
	FindingTypeOK      FindingType = "OK"
	FindingTypeGap     FindingType = "GAP"
	FindingTypeRisk    FindingType = "RISK"
	FindingTypeUnknown FindingType = "UNKNOWN"
)

// FindingStatus represents the resolution status of a finding
type FindingStatus string

const (
	FindingStatusOpen          FindingStatus = "OPEN"
	FindingStatusInProgress    FindingStatus = "IN_PROGRESS"
	FindingStatusResolved      FindingStatus = "RESOLVED"
	FindingStatusAccepted      FindingStatus = "ACCEPTED"
	FindingStatusFalsePositive FindingStatus = "FALSE_POSITIVE"
)

// ControlStatus represents the assessment status of a control instance
type ControlStatus string

const (
	ControlStatusPass          ControlStatus = "PASS"
	ControlStatusPartial       ControlStatus = "PARTIAL"
	ControlStatusFail          ControlStatus = "FAIL"
	ControlStatusNotApplicable ControlStatus = "NOT_APPLICABLE"
	ControlStatusPlanned       ControlStatus = "PLANNED"
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Main Entities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Vendor represents a third-party vendor/service provider subject to GDPR compliance.
// JSON tags drive the API representation; db tags map database columns
// (presumably consumed by a sqlx/scany-style scanner — confirm against store.go).
type Vendor struct {
	ID       uuid.UUID `json:"id" db:"id"`
	TenantID uuid.UUID `json:"tenant_id" db:"tenant_id"` // owning tenant for multi-tenant isolation

	// Basic info
	Name      string          `json:"name" db:"name"`
	LegalForm string          `json:"legal_form,omitempty" db:"legal_form"`
	Country   string          `json:"country" db:"country"`
	Address   json.RawMessage `json:"address,omitempty" db:"address"` // free-form JSON address object
	Website   string          `json:"website,omitempty" db:"website"`

	// Contact
	ContactName       string `json:"contact_name,omitempty" db:"contact_name"`
	ContactEmail      string `json:"contact_email,omitempty" db:"contact_email"`
	ContactPhone      string `json:"contact_phone,omitempty" db:"contact_phone"`
	ContactDepartment string `json:"contact_department,omitempty" db:"contact_department"`

	// GDPR role & service
	Role               VendorRole `json:"role" db:"role"`
	ServiceCategory    string     `json:"service_category,omitempty" db:"service_category"`
	ServiceDescription string     `json:"service_description,omitempty" db:"service_description"`
	DataAccessLevel    string     `json:"data_access_level,omitempty" db:"data_access_level"`

	// Processing details (JSONB)
	ProcessingLocations json.RawMessage `json:"processing_locations,omitempty" db:"processing_locations"`
	Certifications      json.RawMessage `json:"certifications,omitempty" db:"certifications"`

	// Risk scoring; pointers distinguish "not yet scored" from a zero score.
	InherentRiskScore    *int `json:"inherent_risk_score,omitempty" db:"inherent_risk_score"`
	ResidualRiskScore    *int `json:"residual_risk_score,omitempty" db:"residual_risk_score"`
	ManualRiskAdjustment *int `json:"manual_risk_adjustment,omitempty" db:"manual_risk_adjustment"`

	// Review schedule
	ReviewFrequency string     `json:"review_frequency,omitempty" db:"review_frequency"`
	LastReviewDate  *time.Time `json:"last_review_date,omitempty" db:"last_review_date"`
	NextReviewDate  *time.Time `json:"next_review_date,omitempty" db:"next_review_date"`

	// Links to processing activities (JSONB array of IDs)
	ProcessingActivityIDs json.RawMessage `json:"processing_activity_ids,omitempty" db:"processing_activity_ids"`

	// Status & template
	Status     VendorStatus `json:"status" db:"status"`
	TemplateID *string      `json:"template_id,omitempty" db:"template_id"` // optional template the vendor was created from

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
	CreatedBy string    `json:"created_by" db:"created_by"`
}
|
||||||
|
|
||||||
|
// Contract represents a contract/AVV document associated with a vendor.
// It stores file metadata plus review and versioning state; the file
// content itself lives at StoragePath.
type Contract struct {
	ID       uuid.UUID `json:"id" db:"id"`
	TenantID uuid.UUID `json:"tenant_id" db:"tenant_id"`
	VendorID uuid.UUID `json:"vendor_id" db:"vendor_id"`

	// File metadata
	FileName     string `json:"file_name" db:"file_name"`         // stored (sanitized) file name
	OriginalName string `json:"original_name" db:"original_name"` // name as uploaded by the user
	MimeType     string `json:"mime_type" db:"mime_type"`
	FileSize     *int64 `json:"file_size,omitempty" db:"file_size"` // bytes; nil if unknown
	StoragePath  string `json:"storage_path" db:"storage_path"`

	// Document classification
	DocumentType DocumentType `json:"document_type" db:"document_type"`

	// Contract details
	Parties             json.RawMessage `json:"parties,omitempty" db:"parties"` // JSONB list of contracting parties
	EffectiveDate       *time.Time      `json:"effective_date,omitempty" db:"effective_date"`
	ExpirationDate      *time.Time      `json:"expiration_date,omitempty" db:"expiration_date"`
	AutoRenewal         bool            `json:"auto_renewal" db:"auto_renewal"`
	RenewalNoticePeriod string          `json:"renewal_notice_period,omitempty" db:"renewal_notice_period"`

	// Review
	ReviewStatus      string     `json:"review_status" db:"review_status"`
	ReviewCompletedAt *time.Time `json:"review_completed_at,omitempty" db:"review_completed_at"`
	ComplianceScore   *int       `json:"compliance_score,omitempty" db:"compliance_score"` // nil until a review produced a score

	// Versioning; PreviousVersionID links to the superseded contract record.
	Version           string  `json:"version" db:"version"`
	PreviousVersionID *string `json:"previous_version_id,omitempty" db:"previous_version_id"`

	// Extracted content (e.g. for search/analysis)
	ExtractedText string `json:"extracted_text,omitempty" db:"extracted_text"`
	PageCount     *int   `json:"page_count,omitempty" db:"page_count"`

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
	CreatedBy string    `json:"created_by" db:"created_by"`
}
|
||||||
|
|
||||||
|
// Finding represents a compliance finding from a contract review.
// A finding is always attached to a vendor; ContractID is optional
// because a finding may apply to the vendor as a whole.
type Finding struct {
	ID         uuid.UUID `json:"id" db:"id"`
	TenantID   uuid.UUID `json:"tenant_id" db:"tenant_id"`
	ContractID *string   `json:"contract_id,omitempty" db:"contract_id"` // nil for vendor-level findings
	VendorID   uuid.UUID `json:"vendor_id" db:"vendor_id"`

	// Finding details
	FindingType    FindingType `json:"finding_type" db:"finding_type"`
	Category       string      `json:"category" db:"category"`
	Severity       string      `json:"severity" db:"severity"`
	Title          string      `json:"title" db:"title"`
	Description    string      `json:"description" db:"description"`
	Recommendation string      `json:"recommendation,omitempty" db:"recommendation"`

	// Evidence (JSONB); presumably citations into the contract text — confirm.
	Citations json.RawMessage `json:"citations,omitempty" db:"citations"`

	// Resolution workflow
	Status     FindingStatus `json:"status" db:"status"`
	Assignee   string        `json:"assignee,omitempty" db:"assignee"`
	DueDate    *time.Time    `json:"due_date,omitempty" db:"due_date"`
	Resolution string        `json:"resolution,omitempty" db:"resolution"`
	ResolvedAt *time.Time    `json:"resolved_at,omitempty" db:"resolved_at"`
	ResolvedBy *string       `json:"resolved_by,omitempty" db:"resolved_by"`

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}
|
||||||
|
|
||||||
|
// ControlInstance represents an applied control assessment for a specific vendor:
// the per-vendor state of a control identified by ControlID/ControlDomain.
type ControlInstance struct {
	ID       uuid.UUID `json:"id" db:"id"`
	TenantID uuid.UUID `json:"tenant_id" db:"tenant_id"`
	VendorID uuid.UUID `json:"vendor_id" db:"vendor_id"`

	// Control reference (identifier of the control definition; defined elsewhere)
	ControlID     string `json:"control_id" db:"control_id"`
	ControlDomain string `json:"control_domain" db:"control_domain"`

	// Assessment
	Status      ControlStatus   `json:"status" db:"status"`
	EvidenceIDs json.RawMessage `json:"evidence_ids,omitempty" db:"evidence_ids"` // JSONB array of evidence references
	Notes       string          `json:"notes,omitempty" db:"notes"`

	// Assessment tracking
	LastAssessedAt     *time.Time `json:"last_assessed_at,omitempty" db:"last_assessed_at"`
	LastAssessedBy     *string    `json:"last_assessed_by,omitempty" db:"last_assessed_by"`
	NextAssessmentDate *time.Time `json:"next_assessment_date,omitempty" db:"next_assessment_date"`

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}
|
||||||
|
|
||||||
|
// Template represents a pre-filled vendor compliance template.
// System templates (IsSystem) have a nil TenantID and are shared
// across tenants; tenant-specific templates carry the tenant's ID.
type Template struct {
	ID       uuid.UUID `json:"id" db:"id"`
	TenantID *string   `json:"tenant_id,omitempty" db:"tenant_id"` // nil for globally shared templates

	// Template classification
	TemplateType string `json:"template_type" db:"template_type"`
	TemplateID   string `json:"template_id" db:"template_id"` // human-readable identifier, distinct from the UUID primary key
	Category     string `json:"category" db:"category"`

	// Localized names & descriptions (German and English)
	NameDE        string `json:"name_de" db:"name_de"`
	NameEN        string `json:"name_en" db:"name_en"`
	DescriptionDE string `json:"description_de" db:"description_de"`
	DescriptionEN string `json:"description_en" db:"description_en"`

	// Template content (JSONB payload applied when the template is used)
	TemplateData json.RawMessage `json:"template_data" db:"template_data"`

	// Classification
	Industry string          `json:"industry,omitempty" db:"industry"`
	Tags     json.RawMessage `json:"tags,omitempty" db:"tags"`

	// Flags
	IsSystem bool `json:"is_system" db:"is_system"` // built-in template, not user-created
	IsActive bool `json:"is_active" db:"is_active"`

	// Usage tracking
	UsageCount int `json:"usage_count" db:"usage_count"` // number of times the template was applied

	// Audit
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// VendorStats contains aggregated vendor compliance statistics for a tenant.
// Map keys are the string values of the corresponding enum types.
type VendorStats struct {
	TotalVendors     int            `json:"total_vendors"`
	ByStatus         map[string]int `json:"by_status"`     // keyed by VendorStatus value
	ByRole           map[string]int `json:"by_role"`       // keyed by VendorRole value
	ByRiskLevel      map[string]int `json:"by_risk_level"` // keyed by risk-level label
	PendingReviews   int            `json:"pending_reviews"`
	ExpiredContracts int            `json:"expired_contracts"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// -- Vendor -------------------------------------------------------------------
|
||||||
|
|
||||||
|
// CreateVendorRequest is the API request for creating a vendor.
// `binding:"required"` tags are presumably enforced by Gin's model
// binding (the service uses gin) — confirm in the handler layer.
type CreateVendorRequest struct {
	Name                  string          `json:"name" binding:"required"`
	LegalForm             string          `json:"legal_form,omitempty"`
	Country               string          `json:"country" binding:"required"`
	Address               json.RawMessage `json:"address,omitempty"`
	Website               string          `json:"website,omitempty"`
	ContactName           string          `json:"contact_name,omitempty"`
	ContactEmail          string          `json:"contact_email,omitempty"`
	ContactPhone          string          `json:"contact_phone,omitempty"`
	ContactDepartment     string          `json:"contact_department,omitempty"`
	Role                  VendorRole      `json:"role" binding:"required"`
	ServiceCategory       string          `json:"service_category,omitempty"`
	ServiceDescription    string          `json:"service_description,omitempty"`
	DataAccessLevel       string          `json:"data_access_level,omitempty"`
	ProcessingLocations   json.RawMessage `json:"processing_locations,omitempty"`
	Certifications        json.RawMessage `json:"certifications,omitempty"`
	ReviewFrequency       string          `json:"review_frequency,omitempty"`
	ProcessingActivityIDs json.RawMessage `json:"processing_activity_ids,omitempty"`
	TemplateID            *string         `json:"template_id,omitempty"`
}
|
||||||
|
|
||||||
|
// UpdateVendorRequest is the API request for updating a vendor.
// All fields are optional (pointers / raw JSON); a nil field means
// "leave unchanged", enabling partial (PATCH-style) updates.
type UpdateVendorRequest struct {
	Name                  *string         `json:"name,omitempty"`
	LegalForm             *string         `json:"legal_form,omitempty"`
	Country               *string         `json:"country,omitempty"`
	Address               json.RawMessage `json:"address,omitempty"`
	Website               *string         `json:"website,omitempty"`
	ContactName           *string         `json:"contact_name,omitempty"`
	ContactEmail          *string         `json:"contact_email,omitempty"`
	ContactPhone          *string         `json:"contact_phone,omitempty"`
	ContactDepartment     *string         `json:"contact_department,omitempty"`
	Role                  *VendorRole     `json:"role,omitempty"`
	ServiceCategory       *string         `json:"service_category,omitempty"`
	ServiceDescription    *string         `json:"service_description,omitempty"`
	DataAccessLevel       *string         `json:"data_access_level,omitempty"`
	ProcessingLocations   json.RawMessage `json:"processing_locations,omitempty"`
	Certifications        json.RawMessage `json:"certifications,omitempty"`
	InherentRiskScore     *int            `json:"inherent_risk_score,omitempty"`
	ResidualRiskScore     *int            `json:"residual_risk_score,omitempty"`
	ManualRiskAdjustment  *int            `json:"manual_risk_adjustment,omitempty"`
	ReviewFrequency       *string         `json:"review_frequency,omitempty"`
	LastReviewDate        *time.Time      `json:"last_review_date,omitempty"`
	NextReviewDate        *time.Time      `json:"next_review_date,omitempty"`
	ProcessingActivityIDs json.RawMessage `json:"processing_activity_ids,omitempty"`
	Status                *VendorStatus   `json:"status,omitempty"`
	TemplateID            *string         `json:"template_id,omitempty"`
}
|
||||||
|
|
||||||
|
// -- Contract -----------------------------------------------------------------
|
||||||
|
|
||||||
|
// CreateContractRequest is the API request for creating a contract.
// Note: the file itself is expected to already exist at StoragePath;
// this request only registers its metadata.
type CreateContractRequest struct {
	VendorID            uuid.UUID       `json:"vendor_id" binding:"required"`
	FileName            string          `json:"file_name" binding:"required"`
	OriginalName        string          `json:"original_name" binding:"required"`
	MimeType            string          `json:"mime_type" binding:"required"`
	FileSize            *int64          `json:"file_size,omitempty"`
	StoragePath         string          `json:"storage_path" binding:"required"`
	DocumentType        DocumentType    `json:"document_type" binding:"required"`
	Parties             json.RawMessage `json:"parties,omitempty"`
	EffectiveDate       *time.Time      `json:"effective_date,omitempty"`
	ExpirationDate      *time.Time      `json:"expiration_date,omitempty"`
	AutoRenewal         bool            `json:"auto_renewal"`
	RenewalNoticePeriod string          `json:"renewal_notice_period,omitempty"`
	Version             string          `json:"version,omitempty"`
	PreviousVersionID   *string         `json:"previous_version_id,omitempty"`
}
|
||||||
|
|
||||||
|
// UpdateContractRequest is the API request for updating a contract.
// All fields are optional; nil means "leave unchanged" (partial update).
type UpdateContractRequest struct {
	DocumentType        *DocumentType   `json:"document_type,omitempty"`
	Parties             json.RawMessage `json:"parties,omitempty"`
	EffectiveDate       *time.Time      `json:"effective_date,omitempty"`
	ExpirationDate      *time.Time      `json:"expiration_date,omitempty"`
	AutoRenewal         *bool           `json:"auto_renewal,omitempty"`
	RenewalNoticePeriod *string         `json:"renewal_notice_period,omitempty"`
	ReviewStatus        *string         `json:"review_status,omitempty"`
	ReviewCompletedAt   *time.Time      `json:"review_completed_at,omitempty"`
	ComplianceScore     *int            `json:"compliance_score,omitempty"`
	Version             *string         `json:"version,omitempty"`
	ExtractedText       *string         `json:"extracted_text,omitempty"`
	PageCount           *int            `json:"page_count,omitempty"`
}
|
||||||
|
|
||||||
|
// -- Finding ------------------------------------------------------------------
|
||||||
|
|
||||||
|
// CreateFindingRequest is the API request for creating a compliance finding.
// ContractID is optional: a finding may be recorded at vendor level.
type CreateFindingRequest struct {
	ContractID     *string         `json:"contract_id,omitempty"`
	VendorID       uuid.UUID       `json:"vendor_id" binding:"required"`
	FindingType    FindingType     `json:"finding_type" binding:"required"`
	Category       string          `json:"category" binding:"required"`
	Severity       string          `json:"severity" binding:"required"`
	Title          string          `json:"title" binding:"required"`
	Description    string          `json:"description" binding:"required"`
	Recommendation string          `json:"recommendation,omitempty"`
	Citations      json.RawMessage `json:"citations,omitempty"`
	Assignee       string          `json:"assignee,omitempty"`
	DueDate        *time.Time      `json:"due_date,omitempty"`
}
|
||||||
|
|
||||||
|
// UpdateFindingRequest is the API request for updating a finding.
// All fields are optional; nil means "leave unchanged" (partial update).
type UpdateFindingRequest struct {
	FindingType    *FindingType    `json:"finding_type,omitempty"`
	Category       *string         `json:"category,omitempty"`
	Severity       *string         `json:"severity,omitempty"`
	Title          *string         `json:"title,omitempty"`
	Description    *string         `json:"description,omitempty"`
	Recommendation *string         `json:"recommendation,omitempty"`
	Citations      json.RawMessage `json:"citations,omitempty"`
	Status         *FindingStatus  `json:"status,omitempty"`
	Assignee       *string         `json:"assignee,omitempty"`
	DueDate        *time.Time      `json:"due_date,omitempty"`
	Resolution     *string         `json:"resolution,omitempty"`
}
|
||||||
|
|
||||||
|
// ResolveFindingRequest is the API request for resolving a finding.
// Resolution is the mandatory free-text explanation of how the
// finding was resolved.
type ResolveFindingRequest struct {
	Resolution string `json:"resolution" binding:"required"`
}
|
||||||
|
|
||||||
|
// -- ControlInstance ----------------------------------------------------------
|
||||||
|
|
||||||
|
// UpdateControlInstanceRequest is the API request for updating a control
// instance assessment. All fields are optional (partial update).
type UpdateControlInstanceRequest struct {
	Status             *ControlStatus  `json:"status,omitempty"`
	EvidenceIDs        json.RawMessage `json:"evidence_ids,omitempty"`
	Notes              *string         `json:"notes,omitempty"`
	NextAssessmentDate *time.Time      `json:"next_assessment_date,omitempty"`
}
|
||||||
|
|
||||||
|
// -- Template -----------------------------------------------------------------
|
||||||
|
|
||||||
|
// CreateTemplateRequest is the API request for creating a vendor template.
// German and English names are both required; descriptions are optional.
type CreateTemplateRequest struct {
	TemplateType  string          `json:"template_type" binding:"required"`
	TemplateID    string          `json:"template_id" binding:"required"` // human-readable identifier
	Category      string          `json:"category" binding:"required"`
	NameDE        string          `json:"name_de" binding:"required"`
	NameEN        string          `json:"name_en" binding:"required"`
	DescriptionDE string          `json:"description_de,omitempty"`
	DescriptionEN string          `json:"description_en,omitempty"`
	TemplateData  json.RawMessage `json:"template_data" binding:"required"`
	Industry      string          `json:"industry,omitempty"`
	Tags          json.RawMessage `json:"tags,omitempty"`
	IsSystem      bool            `json:"is_system"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// List / Filter Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// VendorFilters defines filters for listing vendors.
// Zero values mean "no filter"; Limit/Offset implement pagination.
type VendorFilters struct {
	Status VendorStatus // filter by status; empty = all
	Role   VendorRole   // filter by role; empty = all
	Search string       // free-text search term; empty = no search
	Limit  int          // page size
	Offset int          // pagination offset
}
|
||||||
|
|
||||||
|
// ContractFilters defines filters for listing contracts.
// Zero values mean "no filter"; Limit/Offset implement pagination.
type ContractFilters struct {
	VendorID     *uuid.UUID   // restrict to one vendor; nil = all vendors
	DocumentType DocumentType // filter by document type; empty = all
	ReviewStatus string       // filter by review status; empty = all
	Limit        int
	Offset       int
}
|
||||||
|
|
||||||
|
// FindingFilters defines filters for listing findings.
// Zero values mean "no filter"; Limit/Offset implement pagination.
type FindingFilters struct {
	VendorID    *uuid.UUID // restrict to one vendor; nil = all
	ContractID  *string    // restrict to one contract; nil = all
	Status      FindingStatus
	FindingType FindingType
	Severity    string
	Limit       int
	Offset      int
}
|
||||||
|
|
||||||
|
// VendorListResponse is the API response for listing vendors.
// Total is the overall match count, independent of pagination.
type VendorListResponse struct {
	Vendors []Vendor `json:"vendors"`
	Total   int      `json:"total"`
}
|
||||||
|
|
||||||
|
// ContractListResponse is the API response for listing contracts.
// Total is the overall match count, independent of pagination.
type ContractListResponse struct {
	Contracts []Contract `json:"contracts"`
	Total     int        `json:"total"`
}
|
||||||
|
|
||||||
|
// FindingListResponse is the API response for listing findings.
// Total is the overall match count, independent of pagination.
type FindingListResponse struct {
	Findings []Finding `json:"findings"`
	Total    int       `json:"total"`
}
|
||||||
1116
ai-compliance-sdk/internal/vendor/store.go
vendored
Normal file
1116
ai-compliance-sdk/internal/vendor/store.go
vendored
Normal file
File diff suppressed because it is too large
Load Diff
242
ai-compliance-sdk/internal/whistleblower/models.go
Normal file
242
ai-compliance-sdk/internal/whistleblower/models.go
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
package whistleblower
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Constants / Enums
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ReportCategory represents the category of a whistleblower report.
// Values are lower-case snake_case strings (unlike the vendor package's
// upper-case enums).
type ReportCategory string

const (
	ReportCategoryCorruption     ReportCategory = "corruption"
	ReportCategoryFraud          ReportCategory = "fraud"
	ReportCategoryDataProtection ReportCategory = "data_protection"
	ReportCategoryDiscrimination ReportCategory = "discrimination"
	ReportCategoryEnvironment    ReportCategory = "environment"
	ReportCategoryCompetition    ReportCategory = "competition"
	ReportCategoryProductSafety  ReportCategory = "product_safety"
	ReportCategoryTaxEvasion     ReportCategory = "tax_evasion"
	ReportCategoryOther          ReportCategory = "other"
)
|
||||||
|
|
||||||
|
// ReportStatus represents the status of a whistleblower report.
// The values model the case-handling workflow from intake to closure.
const (
	ReportStatusNew           ReportStatus = "new"            // received, not yet acknowledged
	ReportStatusAcknowledged  ReportStatus = "acknowledged"   // receipt confirmed to the reporter
	ReportStatusUnderReview   ReportStatus = "under_review"   // initial assessment in progress
	ReportStatusInvestigation ReportStatus = "investigation"  // formal investigation opened
	ReportStatusMeasuresTaken ReportStatus = "measures_taken" // corrective measures in effect
	ReportStatusClosed        ReportStatus = "closed"         // case resolved and closed
	ReportStatusRejected      ReportStatus = "rejected"       // dismissed (e.g. out of scope)
)
|
||||||
|
|
||||||
|
// MessageDirection represents the direction of an anonymous message
// exchanged between the (possibly anonymous) reporter and the case admin.
type MessageDirection string

const (
	MessageDirectionReporterToAdmin MessageDirection = "reporter_to_admin"
	MessageDirectionAdminToReporter MessageDirection = "admin_to_reporter"
)
|
||||||
|
|
||||||
|
// MeasureStatus represents the status of a corrective measure.
type MeasureStatus string

const (
	MeasureStatusPlanned    MeasureStatus = "planned"
	MeasureStatusInProgress MeasureStatus = "in_progress"
	MeasureStatusCompleted  MeasureStatus = "completed"
)
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Main Entities
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// Report represents a whistleblower report (Hinweis) per the German
// HinSchG (Hinweisgeberschutzgesetz). Deadlines are pre-computed on
// creation so overdue cases can be queried directly.
type Report struct {
	ID              uuid.UUID `json:"id"`
	TenantID        uuid.UUID `json:"tenant_id"`
	ReferenceNumber string    `json:"reference_number"`     // e.g. "WB-2026-0001"
	AccessKey       string    `json:"access_key,omitempty"` // for anonymous access; only returned once at creation

	// Report content
	Category    ReportCategory `json:"category"`
	Status      ReportStatus   `json:"status"`
	Title       string         `json:"title"`
	Description string         `json:"description"`

	// Reporter info (optional, only populated for non-anonymous reports)
	IsAnonymous   bool    `json:"is_anonymous"`
	ReporterName  *string `json:"reporter_name,omitempty"`
	ReporterEmail *string `json:"reporter_email,omitempty"`
	ReporterPhone *string `json:"reporter_phone,omitempty"`

	// HinSchG deadlines, derived from ReceivedAt
	ReceivedAt             time.Time `json:"received_at"`
	DeadlineAcknowledgment time.Time `json:"deadline_acknowledgment"` // 7 days from received_at per HinSchG
	DeadlineFeedback       time.Time `json:"deadline_feedback"`       // 3 months from received_at per HinSchG

	// Status timestamps
	AcknowledgedAt *time.Time `json:"acknowledged_at,omitempty"`
	ClosedAt       *time.Time `json:"closed_at,omitempty"`

	// Assignment (internal case handler); nil = unassigned
	AssignedTo *uuid.UUID `json:"assigned_to,omitempty"`

	// Resolution (free text, set when the case is closed)
	Resolution string `json:"resolution,omitempty"`

	// Audit trail (stored as JSONB)
	AuditTrail []AuditEntry `json:"audit_trail"`

	// Timestamps
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// AnonymousMessage represents a message exchanged between reporter and admin.
// The channel works without revealing the reporter's identity; the
// reporter authenticates via the report's access key.
type AnonymousMessage struct {
	ID        uuid.UUID        `json:"id"`
	ReportID  uuid.UUID        `json:"report_id"`
	Direction MessageDirection `json:"direction"`
	Content   string           `json:"content"`
	SentAt    time.Time        `json:"sent_at"`
	ReadAt    *time.Time       `json:"read_at,omitempty"` // nil until the recipient has read it
}
|
||||||
|
|
||||||
|
// Measure represents a corrective measure taken for a report.
type Measure struct {
	ID          uuid.UUID     `json:"id"`
	ReportID    uuid.UUID     `json:"report_id"`
	Title       string        `json:"title"`
	Description string        `json:"description"`
	Status      MeasureStatus `json:"status"`
	Responsible string        `json:"responsible"` // person/role accountable for execution
	DueDate     *time.Time    `json:"due_date,omitempty"`
	CompletedAt *time.Time    `json:"completed_at,omitempty"`
	CreatedAt   time.Time     `json:"created_at"`
}
|
||||||
|
|
||||||
|
// AuditEntry represents an entry in the audit trail of a report.
// Entries are persisted as part of the report's JSONB audit_trail.
type AuditEntry struct {
	Timestamp time.Time `json:"timestamp"`
	Action    string    `json:"action"`  // what happened (e.g. status change)
	UserID    string    `json:"user_id"` // who performed the action
	Details   string    `json:"details"` // free-text context
}
|
||||||
|
|
||||||
|
// WhistleblowerStatistics contains aggregated statistics for a tenant.
// "Overdue" counters refer to the HinSchG acknowledgment (7-day) and
// feedback (3-month) deadlines tracked on each report.
type WhistleblowerStatistics struct {
	TotalReports           int            `json:"total_reports"`
	ByStatus               map[string]int `json:"by_status"`   // keyed by ReportStatus value
	ByCategory             map[string]int `json:"by_category"` // keyed by ReportCategory value
	OverdueAcknowledgments int            `json:"overdue_acknowledgments"`
	OverdueFeedbacks       int            `json:"overdue_feedbacks"`
	AvgResolutionDays      float64        `json:"avg_resolution_days"`
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// API Request/Response Types
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// PublicReportSubmission is the request for submitting a report (NO auth required).
// Reporter contact fields are optional and only meaningful when
// IsAnonymous is false.
type PublicReportSubmission struct {
	Category      ReportCategory `json:"category" binding:"required"`
	Title         string         `json:"title" binding:"required"`
	Description   string         `json:"description" binding:"required"`
	IsAnonymous   bool           `json:"is_anonymous"`
	ReporterName  *string        `json:"reporter_name,omitempty"`
	ReporterEmail *string        `json:"reporter_email,omitempty"`
	ReporterPhone *string        `json:"reporter_phone,omitempty"`
}
|
||||||
|
|
||||||
|
// PublicReportResponse is returned after submitting a report.
// The access key is shown exactly once: the reporter must save it to
// check status and exchange messages later.
type PublicReportResponse struct {
	ReferenceNumber string `json:"reference_number"`
	AccessKey       string `json:"access_key"`
}
|
||||||
|
|
||||||
|
// ReportUpdateRequest is the request for updating a report (admin side).
// Zero-valued fields presumably mean "leave unchanged" — note that,
// unlike the vendor package, non-pointer string fields cannot
// distinguish "unset" from an intentional empty value.
type ReportUpdateRequest struct {
	Category    ReportCategory `json:"category,omitempty"`
	Status      ReportStatus   `json:"status,omitempty"`
	Title       string         `json:"title,omitempty"`
	Description string         `json:"description,omitempty"`
	AssignedTo  *uuid.UUID     `json:"assigned_to,omitempty"`
}
|
||||||
|
|
||||||
|
// AcknowledgeRequest is the request for acknowledging a report
// (confirming receipt to the reporter within the HinSchG deadline).
type AcknowledgeRequest struct {
	Message string `json:"message,omitempty"` // optional acknowledgment message to reporter
}
|
||||||
|
|
||||||
|
// CloseReportRequest is the request for closing a report.
// A non-empty resolution text is mandatory.
type CloseReportRequest struct {
	Resolution string `json:"resolution" binding:"required"`
}
|
||||||
|
|
||||||
|
// AddMeasureRequest is the request for adding a corrective measure
// to a report.
type AddMeasureRequest struct {
	Title       string     `json:"title" binding:"required"`
	Description string     `json:"description"`
	Responsible string     `json:"responsible" binding:"required"`
	DueDate     *time.Time `json:"due_date,omitempty"`
}
|
||||||
|
|
||||||
|
// UpdateMeasureRequest is the request for updating a measure.
// Empty fields presumably mean "leave unchanged" — confirm in the
// store layer, since non-pointer fields cannot express an explicit
// empty value.
type UpdateMeasureRequest struct {
	Title       string        `json:"title,omitempty"`
	Description string        `json:"description,omitempty"`
	Status      MeasureStatus `json:"status,omitempty"`
	Responsible string        `json:"responsible,omitempty"`
	DueDate     *time.Time    `json:"due_date,omitempty"`
}
|
||||||
|
|
||||||
|
// SendMessageRequest is the request for sending an anonymous message
// on a report's communication channel.
type SendMessageRequest struct {
	Content string `json:"content" binding:"required"`
}
|
||||||
|
|
||||||
|
// ReportListResponse is the response for listing reports.
// Total is the overall match count, independent of pagination.
type ReportListResponse struct {
	Reports []Report `json:"reports"`
	Total   int      `json:"total"`
}
|
||||||
|
|
||||||
|
// ReportFilters defines filters for listing reports.
// Zero values mean "no filter"; Limit/Offset implement pagination.
type ReportFilters struct {
	Status   ReportStatus
	Category ReportCategory
	Limit    int
	Offset   int
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Helper Functions
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// generateAccessKey generates a random 12-character alphanumeric key.
//
// The key is the only credential an anonymous reporter holds, so it must be
// uniformly random. Two defects are fixed versus the prior version: the
// error from rand.Read is no longer ignored, and rejection sampling removes
// the modulo bias (256 is not a multiple of 62, so `byte % 62` favored the
// first 8 characters of the charset).
//
// NOTE(review): confirm the file imports crypto/rand, not math/rand —
// math/rand would make these keys predictable.
func generateAccessKey() string {
	const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
	// Largest multiple of len(charset) representable in a byte; bytes at or
	// above this bound are discarded so each charset index is equally likely.
	const limit = 256 - 256%len(charset)

	key := make([]byte, 0, 12)
	buf := make([]byte, 16) // a little slack so one read usually suffices
	for len(key) < 12 {
		if _, err := rand.Read(buf); err != nil {
			// No way to return an error without changing the signature;
			// a failing system RNG is unrecoverable anyway.
			panic("whistleblower: reading random bytes: " + err.Error())
		}
		for _, rb := range buf {
			if int(rb) < limit && len(key) < 12 {
				key = append(key, charset[int(rb)%len(charset)])
			}
		}
	}
	return string(key)
}
|
// generateReferenceNumber builds a human-readable case reference such as
// "WB-2026-0042" from the report year and the per-tenant, per-year sequence
// number. The sequence is zero-padded to four digits.
func generateReferenceNumber(year int, sequence int) string {
	ref := fmt.Sprintf("WB-%d-%04d", year, sequence)
	return ref
}
591
ai-compliance-sdk/internal/whistleblower/store.go
Normal file
591
ai-compliance-sdk/internal/whistleblower/store.go
Normal file
@@ -0,0 +1,591 @@
|
|||||||
|
package whistleblower
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Store handles whistleblower data persistence.
// It is a thin wrapper around a pgx connection pool and holds no other
// state, so a single Store is safe to share across request handlers.
type Store struct {
	pool *pgxpool.Pool
}

// NewStore creates a new whistleblower store backed by the given pool.
// The caller retains ownership of the pool and is responsible for closing it.
func NewStore(pool *pgxpool.Pool) *Store {
	return &Store{pool: pool}
}
|
// ============================================================================
|
||||||
|
// Report CRUD Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateReport creates a new whistleblower report with auto-generated reference number and access key.
//
// The function mutates the passed report in place: it assigns ID, the
// created/updated/received timestamps, both HinSchG deadlines, the default
// status, the anonymous-access key, the "WB-YYYY-NNNN" reference number and
// an initial audit-trail entry, then inserts the row. On error the report
// struct may be partially populated; callers should discard it.
func (s *Store) CreateReport(ctx context.Context, report *Report) error {
	report.ID = uuid.New()
	now := time.Now().UTC()
	report.CreatedAt = now
	report.UpdatedAt = now
	report.ReceivedAt = now
	report.DeadlineAcknowledgment = now.AddDate(0, 0, 7) // 7 days per HinSchG
	report.DeadlineFeedback = now.AddDate(0, 3, 0)       // 3 months per HinSchG

	if report.Status == "" {
		report.Status = ReportStatusNew
	}

	// Generate access key (the reporter's only credential for follow-up access)
	report.AccessKey = generateAccessKey()

	// Generate reference number from a per-tenant, per-year counter.
	year := now.Year()
	seq, err := s.GetNextSequenceNumber(ctx, report.TenantID, year)
	if err != nil {
		return fmt.Errorf("failed to get sequence number: %w", err)
	}
	report.ReferenceNumber = generateReferenceNumber(year, seq)

	// Initialize audit trail with a synthetic "system" entry for the submission.
	if report.AuditTrail == nil {
		report.AuditTrail = []AuditEntry{}
	}
	report.AuditTrail = append(report.AuditTrail, AuditEntry{
		Timestamp: now,
		Action:    "report_created",
		UserID:    "system",
		Details:   "Report submitted",
	})

	// Marshal error deliberately ignored: AuditEntry contains only
	// marshalable fields, so this cannot realistically fail.
	auditTrailJSON, _ := json.Marshal(report.AuditTrail)

	_, err = s.pool.Exec(ctx, `
		INSERT INTO whistleblower_reports (
			id, tenant_id, reference_number, access_key,
			category, status, title, description,
			is_anonymous, reporter_name, reporter_email, reporter_phone,
			received_at, deadline_acknowledgment, deadline_feedback,
			acknowledged_at, closed_at, assigned_to,
			audit_trail, resolution,
			created_at, updated_at
		) VALUES (
			$1, $2, $3, $4,
			$5, $6, $7, $8,
			$9, $10, $11, $12,
			$13, $14, $15,
			$16, $17, $18,
			$19, $20,
			$21, $22
		)
	`,
		report.ID, report.TenantID, report.ReferenceNumber, report.AccessKey,
		string(report.Category), string(report.Status), report.Title, report.Description,
		report.IsAnonymous, report.ReporterName, report.ReporterEmail, report.ReporterPhone,
		report.ReceivedAt, report.DeadlineAcknowledgment, report.DeadlineFeedback,
		report.AcknowledgedAt, report.ClosedAt, report.AssignedTo,
		auditTrailJSON, report.Resolution,
		report.CreatedAt, report.UpdatedAt,
	)

	return err
}
|
// GetReport retrieves a report by ID.
//
// Returns (nil, nil) when no report with the given ID exists — callers must
// nil-check the report before use. The stored category/status strings are
// converted back to their typed forms, and the JSONB audit trail is decoded
// into report.AuditTrail.
func (s *Store) GetReport(ctx context.Context, id uuid.UUID) (*Report, error) {
	var report Report
	// Scanned as plain strings, then converted to typed values below.
	var category, status string
	var auditTrailJSON []byte

	err := s.pool.QueryRow(ctx, `
		SELECT
			id, tenant_id, reference_number, access_key,
			category, status, title, description,
			is_anonymous, reporter_name, reporter_email, reporter_phone,
			received_at, deadline_acknowledgment, deadline_feedback,
			acknowledged_at, closed_at, assigned_to,
			audit_trail, resolution,
			created_at, updated_at
		FROM whistleblower_reports WHERE id = $1
	`, id).Scan(
		&report.ID, &report.TenantID, &report.ReferenceNumber, &report.AccessKey,
		&category, &status, &report.Title, &report.Description,
		&report.IsAnonymous, &report.ReporterName, &report.ReporterEmail, &report.ReporterPhone,
		&report.ReceivedAt, &report.DeadlineAcknowledgment, &report.DeadlineFeedback,
		&report.AcknowledgedAt, &report.ClosedAt, &report.AssignedTo,
		&auditTrailJSON, &report.Resolution,
		&report.CreatedAt, &report.UpdatedAt,
	)

	// Absence is not an error for callers: map pgx.ErrNoRows to (nil, nil).
	if err == pgx.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}

	report.Category = ReportCategory(category)
	report.Status = ReportStatus(status)
	// Unmarshal error deliberately ignored: a corrupt trail degrades to an
	// empty slice rather than failing the whole read (best-effort).
	json.Unmarshal(auditTrailJSON, &report.AuditTrail)

	return &report, nil
}
|
// GetReportByAccessKey retrieves a report by its access key (for public anonymous access)
|
||||||
|
func (s *Store) GetReportByAccessKey(ctx context.Context, accessKey string) (*Report, error) {
|
||||||
|
var id uuid.UUID
|
||||||
|
err := s.pool.QueryRow(ctx,
|
||||||
|
"SELECT id FROM whistleblower_reports WHERE access_key = $1",
|
||||||
|
accessKey,
|
||||||
|
).Scan(&id)
|
||||||
|
|
||||||
|
if err == pgx.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return s.GetReport(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListReports lists reports for a tenant with optional filters.
//
// Returns one page of reports plus the total number of rows matching the
// filters (ignoring Limit/Offset). Access keys are blanked in the results so
// list endpoints cannot leak reporter credentials. filters may be nil.
//
// NOTE(review): Offset is only applied when Limit > 0 (nested below) — an
// Offset without a Limit is silently ignored. Confirm that is intended.
func (s *Store) ListReports(ctx context.Context, tenantID uuid.UUID, filters *ReportFilters) ([]Report, int, error) {
	// Count total matching rows (pre-pagination) with the same WHERE clauses.
	countQuery := "SELECT COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1"
	countArgs := []interface{}{tenantID}
	countArgIdx := 2

	if filters != nil {
		if filters.Status != "" {
			countQuery += fmt.Sprintf(" AND status = $%d", countArgIdx)
			countArgs = append(countArgs, string(filters.Status))
			countArgIdx++
		}
		if filters.Category != "" {
			countQuery += fmt.Sprintf(" AND category = $%d", countArgIdx)
			countArgs = append(countArgs, string(filters.Category))
			countArgIdx++
		}
	}

	var total int
	err := s.pool.QueryRow(ctx, countQuery, countArgs...).Scan(&total)
	if err != nil {
		return nil, 0, err
	}

	// Build data query. Only placeholder indexes are interpolated via
	// Sprintf; all values go through bind parameters (no SQL injection).
	query := `
		SELECT
			id, tenant_id, reference_number, access_key,
			category, status, title, description,
			is_anonymous, reporter_name, reporter_email, reporter_phone,
			received_at, deadline_acknowledgment, deadline_feedback,
			acknowledged_at, closed_at, assigned_to,
			audit_trail, resolution,
			created_at, updated_at
		FROM whistleblower_reports WHERE tenant_id = $1`

	args := []interface{}{tenantID}
	argIdx := 2

	if filters != nil {
		if filters.Status != "" {
			query += fmt.Sprintf(" AND status = $%d", argIdx)
			args = append(args, string(filters.Status))
			argIdx++
		}
		if filters.Category != "" {
			query += fmt.Sprintf(" AND category = $%d", argIdx)
			args = append(args, string(filters.Category))
			argIdx++
		}
	}

	// Newest first.
	query += " ORDER BY created_at DESC"

	if filters != nil && filters.Limit > 0 {
		query += fmt.Sprintf(" LIMIT $%d", argIdx)
		args = append(args, filters.Limit)
		argIdx++

		if filters.Offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIdx)
			args = append(args, filters.Offset)
			argIdx++
		}
	}

	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, 0, err
	}
	defer rows.Close()

	var reports []Report
	for rows.Next() {
		var report Report
		var category, status string
		var auditTrailJSON []byte

		err := rows.Scan(
			&report.ID, &report.TenantID, &report.ReferenceNumber, &report.AccessKey,
			&category, &status, &report.Title, &report.Description,
			&report.IsAnonymous, &report.ReporterName, &report.ReporterEmail, &report.ReporterPhone,
			&report.ReceivedAt, &report.DeadlineAcknowledgment, &report.DeadlineFeedback,
			&report.AcknowledgedAt, &report.ClosedAt, &report.AssignedTo,
			&auditTrailJSON, &report.Resolution,
			&report.CreatedAt, &report.UpdatedAt,
		)
		if err != nil {
			return nil, 0, err
		}

		report.Category = ReportCategory(category)
		report.Status = ReportStatus(status)
		// Decode error deliberately ignored (best-effort, as in GetReport).
		json.Unmarshal(auditTrailJSON, &report.AuditTrail)

		// Do not expose access key in list responses
		report.AccessKey = ""

		reports = append(reports, report)
	}

	return reports, total, nil
}
|
// UpdateReport updates a report.
//
// Persists the mutable fields (category, status, title, description,
// assignee, audit trail, resolution) and bumps updated_at, which is also
// written back to the passed struct. Identity, reporter details, deadlines
// and the access key are intentionally not updatable through this path.
func (s *Store) UpdateReport(ctx context.Context, report *Report) error {
	report.UpdatedAt = time.Now().UTC()

	// Marshal error deliberately ignored: the audit trail is always
	// marshalable (plain struct fields only).
	auditTrailJSON, _ := json.Marshal(report.AuditTrail)

	_, err := s.pool.Exec(ctx, `
		UPDATE whistleblower_reports SET
			category = $2, status = $3, title = $4, description = $5,
			assigned_to = $6, audit_trail = $7, resolution = $8,
			updated_at = $9
		WHERE id = $1
	`,
		report.ID,
		string(report.Category), string(report.Status), report.Title, report.Description,
		report.AssignedTo, auditTrailJSON, report.Resolution,
		report.UpdatedAt,
	)

	return err
}
|
// AcknowledgeReport acknowledges a report, setting acknowledged_at and adding an audit entry
|
||||||
|
func (s *Store) AcknowledgeReport(ctx context.Context, id uuid.UUID, userID uuid.UUID) error {
|
||||||
|
report, err := s.GetReport(ctx, id)
|
||||||
|
if err != nil || report == nil {
|
||||||
|
return fmt.Errorf("report not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
now := time.Now().UTC()
|
||||||
|
report.AcknowledgedAt = &now
|
||||||
|
report.Status = ReportStatusAcknowledged
|
||||||
|
report.UpdatedAt = now
|
||||||
|
|
||||||
|
report.AuditTrail = append(report.AuditTrail, AuditEntry{
|
||||||
|
Timestamp: now,
|
||||||
|
Action: "report_acknowledged",
|
||||||
|
UserID: userID.String(),
|
||||||
|
Details: "Report acknowledged within HinSchG deadline",
|
||||||
|
})
|
||||||
|
|
||||||
|
auditTrailJSON, _ := json.Marshal(report.AuditTrail)
|
||||||
|
|
||||||
|
_, err = s.pool.Exec(ctx, `
|
||||||
|
UPDATE whistleblower_reports SET
|
||||||
|
status = $2, acknowledged_at = $3,
|
||||||
|
audit_trail = $4, updated_at = $5
|
||||||
|
WHERE id = $1
|
||||||
|
`,
|
||||||
|
id, string(ReportStatusAcknowledged), now,
|
||||||
|
auditTrailJSON, now,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloseReport closes a report with a resolution
|
||||||
|
func (s *Store) CloseReport(ctx context.Context, id uuid.UUID, userID uuid.UUID, resolution string) error {
|
||||||
|
report, err := s.GetReport(ctx, id)
|
||||||
|
if err != nil || report == nil {
|
||||||
|
return fmt.Errorf("report not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
now := time.Now().UTC()
|
||||||
|
report.ClosedAt = &now
|
||||||
|
report.Status = ReportStatusClosed
|
||||||
|
report.Resolution = resolution
|
||||||
|
report.UpdatedAt = now
|
||||||
|
|
||||||
|
report.AuditTrail = append(report.AuditTrail, AuditEntry{
|
||||||
|
Timestamp: now,
|
||||||
|
Action: "report_closed",
|
||||||
|
UserID: userID.String(),
|
||||||
|
Details: "Report closed with resolution: " + resolution,
|
||||||
|
})
|
||||||
|
|
||||||
|
auditTrailJSON, _ := json.Marshal(report.AuditTrail)
|
||||||
|
|
||||||
|
_, err = s.pool.Exec(ctx, `
|
||||||
|
UPDATE whistleblower_reports SET
|
||||||
|
status = $2, closed_at = $3, resolution = $4,
|
||||||
|
audit_trail = $5, updated_at = $6
|
||||||
|
WHERE id = $1
|
||||||
|
`,
|
||||||
|
id, string(ReportStatusClosed), now, resolution,
|
||||||
|
auditTrailJSON, now,
|
||||||
|
)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteReport deletes a report and its related data (cascading via FK)
|
||||||
|
func (s *Store) DeleteReport(ctx context.Context, id uuid.UUID) error {
|
||||||
|
_, err := s.pool.Exec(ctx, "DELETE FROM whistleblower_measures WHERE report_id = $1", id)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = s.pool.Exec(ctx, "DELETE FROM whistleblower_messages WHERE report_id = $1", id)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = s.pool.Exec(ctx, "DELETE FROM whistleblower_reports WHERE id = $1", id)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Message Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddMessage adds an anonymous message to a report.
//
// Assigns a fresh ID and the current UTC send time to msg in place before
// inserting. Direction distinguishes reporter-to-handler from
// handler-to-reporter traffic; ReadAt is persisted as given (typically nil
// for a new message).
func (s *Store) AddMessage(ctx context.Context, msg *AnonymousMessage) error {
	msg.ID = uuid.New()
	msg.SentAt = time.Now().UTC()

	_, err := s.pool.Exec(ctx, `
		INSERT INTO whistleblower_messages (
			id, report_id, direction, content, sent_at, read_at
		) VALUES (
			$1, $2, $3, $4, $5, $6
		)
	`,
		msg.ID, msg.ReportID, string(msg.Direction), msg.Content, msg.SentAt, msg.ReadAt,
	)

	return err
}
|
// ListMessages lists messages for a report, oldest first (chronological
// conversation order). Returns a nil slice when the report has no messages.
func (s *Store) ListMessages(ctx context.Context, reportID uuid.UUID) ([]AnonymousMessage, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT
			id, report_id, direction, content, sent_at, read_at
		FROM whistleblower_messages WHERE report_id = $1
		ORDER BY sent_at ASC
	`, reportID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var messages []AnonymousMessage
	for rows.Next() {
		var msg AnonymousMessage
		// Stored as a plain string; converted to the typed form after scanning.
		var direction string

		err := rows.Scan(
			&msg.ID, &msg.ReportID, &direction, &msg.Content, &msg.SentAt, &msg.ReadAt,
		)
		if err != nil {
			return nil, err
		}

		msg.Direction = MessageDirection(direction)
		messages = append(messages, msg)
	}

	return messages, nil
}
|
// ============================================================================
|
||||||
|
// Measure Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// AddMeasure adds a corrective measure to a report.
//
// Assigns a fresh ID and creation time to measure in place, and defaults an
// empty status to MeasureStatusPlanned before inserting.
func (s *Store) AddMeasure(ctx context.Context, measure *Measure) error {
	measure.ID = uuid.New()
	measure.CreatedAt = time.Now().UTC()
	if measure.Status == "" {
		measure.Status = MeasureStatusPlanned
	}

	_, err := s.pool.Exec(ctx, `
		INSERT INTO whistleblower_measures (
			id, report_id, title, description, status,
			responsible, due_date, completed_at, created_at
		) VALUES (
			$1, $2, $3, $4, $5,
			$6, $7, $8, $9
		)
	`,
		measure.ID, measure.ReportID, measure.Title, measure.Description, string(measure.Status),
		measure.Responsible, measure.DueDate, measure.CompletedAt, measure.CreatedAt,
	)

	return err
}
|
// ListMeasures lists measures for a report in creation order.
// Returns a nil slice when the report has no measures.
func (s *Store) ListMeasures(ctx context.Context, reportID uuid.UUID) ([]Measure, error) {
	rows, err := s.pool.Query(ctx, `
		SELECT
			id, report_id, title, description, status,
			responsible, due_date, completed_at, created_at
		FROM whistleblower_measures WHERE report_id = $1
		ORDER BY created_at ASC
	`, reportID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var measures []Measure
	for rows.Next() {
		var m Measure
		// Stored as a plain string; converted to the typed form after scanning.
		var status string

		err := rows.Scan(
			&m.ID, &m.ReportID, &m.Title, &m.Description, &status,
			&m.Responsible, &m.DueDate, &m.CompletedAt, &m.CreatedAt,
		)
		if err != nil {
			return nil, err
		}

		m.Status = MeasureStatus(status)
		measures = append(measures, m)
	}

	return measures, nil
}
|
// UpdateMeasure updates a measure.
//
// Overwrites all mutable columns from the passed struct (this is a full
// replace, not a partial patch — callers merge changes into a loaded
// measure first). The measures table carries no updated_at column, so only
// the listed fields change.
func (s *Store) UpdateMeasure(ctx context.Context, measure *Measure) error {
	_, err := s.pool.Exec(ctx, `
		UPDATE whistleblower_measures SET
			title = $2, description = $3, status = $4,
			responsible = $5, due_date = $6, completed_at = $7
		WHERE id = $1
	`,
		measure.ID,
		measure.Title, measure.Description, string(measure.Status),
		measure.Responsible, measure.DueDate, measure.CompletedAt,
	)

	return err
}
|
// ============================================================================
|
||||||
|
// Statistics
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetStatistics returns aggregated whistleblower statistics for a tenant.
//
// All queries here are best-effort: Scan/Query errors are deliberately
// ignored so a partially failing aggregation still returns whatever could
// be computed (missing values stay at their zero defaults). This function
// therefore never returns a non-nil error in its current form.
func (s *Store) GetStatistics(ctx context.Context, tenantID uuid.UUID) (*WhistleblowerStatistics, error) {
	stats := &WhistleblowerStatistics{
		ByStatus:   make(map[string]int),
		ByCategory: make(map[string]int),
	}

	// Total reports (error ignored; stays 0 on failure)
	s.pool.QueryRow(ctx,
		"SELECT COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1",
		tenantID).Scan(&stats.TotalReports)

	// By status
	rows, err := s.pool.Query(ctx,
		"SELECT status, COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1 GROUP BY status",
		tenantID)
	if err == nil {
		// The deferred Close binds the current rows value, so this closes the
		// status result set even though rows is reassigned below.
		defer rows.Close()
		for rows.Next() {
			var status string
			var count int
			rows.Scan(&status, &count)
			stats.ByStatus[status] = count
		}
	}

	// By category
	rows, err = s.pool.Query(ctx,
		"SELECT category, COUNT(*) FROM whistleblower_reports WHERE tenant_id = $1 GROUP BY category",
		tenantID)
	if err == nil {
		defer rows.Close()
		for rows.Next() {
			var category string
			var count int
			rows.Scan(&category, &count)
			stats.ByCategory[category] = count
		}
	}

	// Overdue acknowledgments: reports past deadline_acknowledgment that haven't been acknowledged
	s.pool.QueryRow(ctx, `
		SELECT COUNT(*) FROM whistleblower_reports
		WHERE tenant_id = $1
		AND acknowledged_at IS NULL
		AND status = 'new'
		AND deadline_acknowledgment < NOW()
	`, tenantID).Scan(&stats.OverdueAcknowledgments)

	// Overdue feedbacks: reports past deadline_feedback that are still open
	s.pool.QueryRow(ctx, `
		SELECT COUNT(*) FROM whistleblower_reports
		WHERE tenant_id = $1
		AND closed_at IS NULL
		AND status NOT IN ('closed', 'rejected')
		AND deadline_feedback < NOW()
	`, tenantID).Scan(&stats.OverdueFeedbacks)

	// Average resolution days (for closed reports); COALESCE yields 0 when
	// no report has been closed yet.
	s.pool.QueryRow(ctx, `
		SELECT COALESCE(AVG(EXTRACT(EPOCH FROM (closed_at - received_at)) / 86400), 0)
		FROM whistleblower_reports
		WHERE tenant_id = $1 AND closed_at IS NOT NULL
	`, tenantID).Scan(&stats.AvgResolutionDays)

	return stats, nil
}
|
// ============================================================================
|
||||||
|
// Sequence Number
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// GetNextSequenceNumber gets and increments the sequence number for reference number generation.
//
// Uses a single INSERT ... ON CONFLICT DO UPDATE ... RETURNING statement, so
// the increment is atomic per (tenant, year) even under concurrent report
// submissions. The first call for a given tenant/year returns 1.
func (s *Store) GetNextSequenceNumber(ctx context.Context, tenantID uuid.UUID, year int) (int, error) {
	var seq int

	err := s.pool.QueryRow(ctx, `
		INSERT INTO whistleblower_sequences (tenant_id, year, last_sequence)
		VALUES ($1, $2, 1)
		ON CONFLICT (tenant_id, year) DO UPDATE SET
			last_sequence = whistleblower_sequences.last_sequence + 1
		RETURNING last_sequence
	`, tenantID, year).Scan(&seq)

	if err != nil {
		return 0, err
	}

	return seq, nil
}
@@ -6,8 +6,8 @@
|
|||||||
-- Roadmaps table
|
-- Roadmaps table
|
||||||
CREATE TABLE IF NOT EXISTS roadmaps (
|
CREATE TABLE IF NOT EXISTS roadmaps (
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
|
tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
|
||||||
namespace_id UUID REFERENCES namespaces(id) ON DELETE SET NULL,
|
namespace_id UUID REFERENCES compliance_namespaces(id) ON DELETE SET NULL,
|
||||||
|
|
||||||
title VARCHAR(255) NOT NULL,
|
title VARCHAR(255) NOT NULL,
|
||||||
description TEXT,
|
description TEXT,
|
||||||
@@ -93,7 +93,7 @@ CREATE TABLE IF NOT EXISTS roadmap_items (
|
|||||||
-- Import jobs table
|
-- Import jobs table
|
||||||
CREATE TABLE IF NOT EXISTS roadmap_import_jobs (
|
CREATE TABLE IF NOT EXISTS roadmap_import_jobs (
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
|
tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
|
||||||
roadmap_id UUID REFERENCES roadmaps(id) ON DELETE SET NULL,
|
roadmap_id UUID REFERENCES roadmaps(id) ON DELETE SET NULL,
|
||||||
|
|
||||||
-- File info
|
-- File info
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
-- Workshop sessions table
|
-- Workshop sessions table
|
||||||
CREATE TABLE IF NOT EXISTS workshop_sessions (
|
CREATE TABLE IF NOT EXISTS workshop_sessions (
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
|
tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
|
||||||
namespace_id UUID REFERENCES namespaces(id) ON DELETE SET NULL,
|
namespace_id UUID REFERENCES compliance_namespaces(id) ON DELETE SET NULL,
|
||||||
|
|
||||||
-- Session info
|
-- Session info
|
||||||
title VARCHAR(255) NOT NULL,
|
title VARCHAR(255) NOT NULL,
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
-- Portfolios table
|
-- Portfolios table
|
||||||
CREATE TABLE IF NOT EXISTS portfolios (
|
CREATE TABLE IF NOT EXISTS portfolios (
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
tenant_id UUID NOT NULL REFERENCES tenants(id) ON DELETE CASCADE,
|
tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
|
||||||
namespace_id UUID REFERENCES namespaces(id) ON DELETE SET NULL,
|
namespace_id UUID REFERENCES compliance_namespaces(id) ON DELETE SET NULL,
|
||||||
|
|
||||||
-- Info
|
-- Info
|
||||||
name VARCHAR(255) NOT NULL,
|
name VARCHAR(255) NOT NULL,
|
||||||
|
|||||||
159
ai-compliance-sdk/migrations/008_academy_schema.sql
Normal file
159
ai-compliance-sdk/migrations/008_academy_schema.sql
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
-- ============================================================================
-- Migration 008: Academy (E-Learning / Compliance Academy) Schema
-- Compliance training courses, enrollments, and certificate management
-- Assumes migrations creating compliance_tenants and the
-- update_updated_at_column() trigger function have already run.
-- ============================================================================

-- Academy courses table
CREATE TABLE IF NOT EXISTS academy_courses (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,

    -- Course info
    title VARCHAR(255) NOT NULL,
    description TEXT,
    category VARCHAR(50) NOT NULL, -- 'dsgvo_basics', 'it_security', 'ai_literacy', 'whistleblower_protection', 'custom'
    duration_minutes INT DEFAULT 0,
    required_for_roles JSONB DEFAULT '[]', -- Array of role strings

    -- Status
    is_active BOOLEAN DEFAULT TRUE,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Academy lessons table
CREATE TABLE IF NOT EXISTS academy_lessons (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    course_id UUID NOT NULL REFERENCES academy_courses(id) ON DELETE CASCADE,

    -- Lesson info
    title VARCHAR(255) NOT NULL,
    description TEXT,
    lesson_type VARCHAR(50) NOT NULL, -- 'video', 'text', 'quiz', 'interactive'
    content_url TEXT,
    duration_minutes INT DEFAULT 0,
    order_index INT DEFAULT 0, -- position of the lesson within its course

    -- Quiz questions (only for lesson_type = 'quiz')
    quiz_questions JSONB DEFAULT '[]', -- Array of {question, options, correct_index, explanation}

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Academy enrollments table
CREATE TABLE IF NOT EXISTS academy_enrollments (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
    course_id UUID NOT NULL REFERENCES academy_courses(id) ON DELETE CASCADE,
    user_id UUID NOT NULL, -- no FK: users may live outside this schema

    -- User info (denormalized for reporting)
    user_name VARCHAR(255) NOT NULL,
    user_email VARCHAR(255) NOT NULL,

    -- Progress tracking
    status VARCHAR(50) DEFAULT 'not_started', -- 'not_started', 'in_progress', 'completed', 'expired'
    progress_percent INT DEFAULT 0, -- 0-100
    current_lesson_index INT DEFAULT 0,

    -- Timestamps
    started_at TIMESTAMPTZ,
    completed_at TIMESTAMPTZ,
    deadline TIMESTAMPTZ,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Academy certificates table
CREATE TABLE IF NOT EXISTS academy_certificates (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    enrollment_id UUID NOT NULL UNIQUE REFERENCES academy_enrollments(id) ON DELETE CASCADE,

    -- Certificate info (denormalized snapshot at issue time)
    user_name VARCHAR(255) NOT NULL,
    course_title VARCHAR(255) NOT NULL,
    issued_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    valid_until TIMESTAMPTZ,
    pdf_url TEXT,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- ============================================================================
-- Indexes
-- ============================================================================

-- Course indexes
CREATE INDEX IF NOT EXISTS idx_academy_courses_tenant ON academy_courses(tenant_id);
CREATE INDEX IF NOT EXISTS idx_academy_courses_category ON academy_courses(tenant_id, category);
CREATE INDEX IF NOT EXISTS idx_academy_courses_active ON academy_courses(tenant_id, is_active);

-- Lesson indexes
CREATE INDEX IF NOT EXISTS idx_academy_lessons_course ON academy_lessons(course_id);
CREATE INDEX IF NOT EXISTS idx_academy_lessons_order ON academy_lessons(course_id, order_index);

-- Enrollment indexes
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_tenant ON academy_enrollments(tenant_id);
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_course ON academy_enrollments(course_id);
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_user ON academy_enrollments(user_id);
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_status ON academy_enrollments(tenant_id, status);
-- Partial index: only open enrollments with a deadline (deadline-reminder queries)
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_deadline ON academy_enrollments(deadline) WHERE deadline IS NOT NULL AND status NOT IN ('completed', 'expired');
CREATE INDEX IF NOT EXISTS idx_academy_enrollments_tenant_course ON academy_enrollments(tenant_id, course_id);

-- Certificate indexes
CREATE INDEX IF NOT EXISTS idx_academy_certificates_enrollment ON academy_certificates(enrollment_id);
CREATE INDEX IF NOT EXISTS idx_academy_certificates_valid_until ON academy_certificates(valid_until) WHERE valid_until IS NOT NULL;

-- ============================================================================
-- Triggers
-- ============================================================================

-- Reuse existing update_updated_at_column function

-- Courses trigger
DROP TRIGGER IF EXISTS update_academy_courses_updated_at ON academy_courses;
CREATE TRIGGER update_academy_courses_updated_at
    BEFORE UPDATE ON academy_courses
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Lessons trigger
DROP TRIGGER IF EXISTS update_academy_lessons_updated_at ON academy_lessons;
CREATE TRIGGER update_academy_lessons_updated_at
    BEFORE UPDATE ON academy_lessons
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Enrollments trigger
DROP TRIGGER IF EXISTS update_academy_enrollments_updated_at ON academy_enrollments;
CREATE TRIGGER update_academy_enrollments_updated_at
    BEFORE UPDATE ON academy_enrollments
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Certificates trigger
DROP TRIGGER IF EXISTS update_academy_certificates_updated_at ON academy_certificates;
CREATE TRIGGER update_academy_certificates_updated_at
    BEFORE UPDATE ON academy_certificates
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- ============================================================================
-- Comments
-- ============================================================================

COMMENT ON TABLE academy_courses IS 'Compliance training courses (DSGVO, IT-Security, AI Literacy, Whistleblower)';
COMMENT ON TABLE academy_lessons IS 'Individual lessons within a course (video, text, quiz, interactive)';
COMMENT ON TABLE academy_enrollments IS 'User enrollments in courses with progress tracking';
COMMENT ON TABLE academy_certificates IS 'Completion certificates issued for finished enrollments';

COMMENT ON COLUMN academy_courses.category IS 'Course category: dsgvo_basics, it_security, ai_literacy, whistleblower_protection, custom';
COMMENT ON COLUMN academy_courses.required_for_roles IS 'JSON array of role names that are required to complete this course';
COMMENT ON COLUMN academy_lessons.quiz_questions IS 'JSON array of quiz questions: [{question, options, correct_index, explanation}]';
COMMENT ON COLUMN academy_enrollments.status IS 'Enrollment status: not_started, in_progress, completed, expired';
COMMENT ON COLUMN academy_enrollments.progress_percent IS 'Course completion percentage (0-100)';
COMMENT ON COLUMN academy_certificates.enrollment_id IS 'One-to-one relationship with enrollment (UNIQUE constraint)';
|
||||||
141
ai-compliance-sdk/migrations/009_whistleblower_schema.sql
Normal file
141
ai-compliance-sdk/migrations/009_whistleblower_schema.sql
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
-- ============================================================================
-- Migration 009: Whistleblower / Hinweisgebersystem (HinSchG)
-- Implements the German Whistleblower Protection Act (Hinweisgeberschutzgesetz)
-- ============================================================================

-- Whistleblower reports table
CREATE TABLE IF NOT EXISTS whistleblower_reports (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,

    -- Identification
    reference_number VARCHAR(20) NOT NULL UNIQUE, -- e.g. "WB-2026-0001"
    access_key VARCHAR(50) NOT NULL UNIQUE, -- for anonymous reporter access

    -- Report content
    category VARCHAR(50) NOT NULL, -- corruption, fraud, data_protection, discrimination, environment, competition, product_safety, tax_evasion, other
    status VARCHAR(50) NOT NULL DEFAULT 'new', -- new, acknowledged, under_review, investigation, measures_taken, closed, rejected
    title VARCHAR(500) NOT NULL,
    description TEXT NOT NULL,

    -- Reporter info (nullable for anonymous reports)
    is_anonymous BOOLEAN NOT NULL DEFAULT TRUE,
    reporter_name VARCHAR(255),
    reporter_email VARCHAR(255),
    reporter_phone VARCHAR(100),

    -- HinSchG deadlines
    received_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    deadline_acknowledgment TIMESTAMPTZ NOT NULL, -- 7 days from received_at per HinSchG
    deadline_feedback TIMESTAMPTZ NOT NULL, -- 3 months from received_at per HinSchG

    -- Status timestamps
    acknowledged_at TIMESTAMPTZ,
    closed_at TIMESTAMPTZ,

    -- Assignment
    assigned_to UUID, -- user responsible for handling

    -- Audit trail (JSONB array of {timestamp, action, user_id, details})
    audit_trail JSONB NOT NULL DEFAULT '[]',

    -- Resolution
    resolution TEXT,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||||
|
|
||||||
|
-- Whistleblower messages table (anonymous communication channel)
CREATE TABLE IF NOT EXISTS whistleblower_messages (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    report_id UUID NOT NULL REFERENCES whistleblower_reports(id) ON DELETE CASCADE,

    -- Message
    direction VARCHAR(30) NOT NULL, -- reporter_to_admin, admin_to_reporter
    content TEXT NOT NULL,
    sent_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    read_at TIMESTAMPTZ
);
|
||||||
|
|
||||||
|
-- Whistleblower measures table (corrective measures)
CREATE TABLE IF NOT EXISTS whistleblower_measures (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    report_id UUID NOT NULL REFERENCES whistleblower_reports(id) ON DELETE CASCADE,

    -- Measure details
    title VARCHAR(500) NOT NULL,
    description TEXT,
    status VARCHAR(50) NOT NULL DEFAULT 'planned', -- planned, in_progress, completed
    responsible VARCHAR(255),
    due_date TIMESTAMPTZ,
    completed_at TIMESTAMPTZ,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||||
|
|
||||||
|
-- Sequence table for reference number generation
-- (per-tenant, per-year counter backing reference numbers like "WB-2026-0001")
CREATE TABLE IF NOT EXISTS whistleblower_sequences (
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
    year INT NOT NULL,
    last_sequence INT NOT NULL DEFAULT 0,
    PRIMARY KEY (tenant_id, year)
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Indexes
-- ============================================================================

-- Report indexes
-- NOTE: reference_number and access_key already carry implicit unique indexes
-- from their UNIQUE constraints, so no additional single-column indexes are
-- created for them (the previous explicit ones were redundant).
CREATE INDEX IF NOT EXISTS idx_whistleblower_reports_tenant ON whistleblower_reports(tenant_id);
CREATE INDEX IF NOT EXISTS idx_whistleblower_reports_status ON whistleblower_reports(tenant_id, status);
CREATE INDEX IF NOT EXISTS idx_whistleblower_reports_category ON whistleblower_reports(tenant_id, category);
CREATE INDEX IF NOT EXISTS idx_whistleblower_reports_received ON whistleblower_reports(tenant_id, received_at);
-- Partial index covering only reports with open HinSchG deadlines.
CREATE INDEX IF NOT EXISTS idx_whistleblower_reports_deadlines ON whistleblower_reports(deadline_acknowledgment, deadline_feedback)
    WHERE acknowledged_at IS NULL OR closed_at IS NULL;

-- Message indexes
CREATE INDEX IF NOT EXISTS idx_whistleblower_messages_report ON whistleblower_messages(report_id);
CREATE INDEX IF NOT EXISTS idx_whistleblower_messages_sent ON whistleblower_messages(report_id, sent_at);

-- Measure indexes
CREATE INDEX IF NOT EXISTS idx_whistleblower_measures_report ON whistleblower_measures(report_id);
CREATE INDEX IF NOT EXISTS idx_whistleblower_measures_status ON whistleblower_measures(report_id, status);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Triggers
-- ============================================================================

-- Reuse existing update_updated_at_column function

-- Reports trigger
DROP TRIGGER IF EXISTS update_whistleblower_reports_updated_at ON whistleblower_reports;
CREATE TRIGGER update_whistleblower_reports_updated_at
    BEFORE UPDATE ON whistleblower_reports
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Measures trigger
DROP TRIGGER IF EXISTS update_whistleblower_measures_updated_at ON whistleblower_measures;
CREATE TRIGGER update_whistleblower_measures_updated_at
    BEFORE UPDATE ON whistleblower_measures
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
-- ============================================================================
-- Comments
-- ============================================================================

COMMENT ON TABLE whistleblower_reports IS 'Whistleblower reports per HinSchG (Hinweisgeberschutzgesetz)';
COMMENT ON TABLE whistleblower_messages IS 'Anonymous communication channel between reporter and admin';
COMMENT ON TABLE whistleblower_measures IS 'Corrective measures taken for whistleblower reports';
COMMENT ON TABLE whistleblower_sequences IS 'Sequence numbers for reference number generation per tenant and year';

COMMENT ON COLUMN whistleblower_reports.reference_number IS 'Human-readable reference number (e.g. WB-2026-0001)';
COMMENT ON COLUMN whistleblower_reports.access_key IS 'Secret key for anonymous reporter to access their report';
COMMENT ON COLUMN whistleblower_reports.deadline_acknowledgment IS '7-day deadline per HinSchG §17 Abs. 1';
COMMENT ON COLUMN whistleblower_reports.deadline_feedback IS '3-month deadline per HinSchG §17 Abs. 2';
COMMENT ON COLUMN whistleblower_reports.audit_trail IS 'JSON array of audit entries tracking all actions on the report';
|
||||||
111
ai-compliance-sdk/migrations/010_incidents_schema.sql
Normal file
111
ai-compliance-sdk/migrations/010_incidents_schema.sql
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
-- ============================================================================
-- Migration 010: Incident/Breach Management Schema
-- DSGVO Art. 33 (Authority Notification) & Art. 34 (Data Subject Notification)
--
-- Art. 33 requires notification of the supervisory authority within 72 hours
-- of becoming aware of a personal data breach, unless the breach is unlikely
-- to result in a risk to the rights and freedoms of natural persons.
--
-- Art. 34 requires notification of affected data subjects without undue delay
-- when the breach is likely to result in a high risk to their rights and freedoms.
-- ============================================================================

-- Incident incidents table
CREATE TABLE IF NOT EXISTS incident_incidents (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,

    -- Incident info
    title VARCHAR(255) NOT NULL,
    description TEXT,
    category VARCHAR(50) NOT NULL, -- data_breach, unauthorized_access, data_loss, system_compromise, phishing, ransomware, insider_threat, physical_breach, other
    status VARCHAR(50) DEFAULT 'detected', -- detected, assessment, containment, notification_required, notification_sent, remediation, closed
    severity VARCHAR(50) NOT NULL, -- critical, high, medium, low

    -- Detection & reporting
    detected_at TIMESTAMPTZ NOT NULL,
    reported_by UUID NOT NULL,

    -- Affected scope
    affected_data_categories JSONB DEFAULT '[]', -- e.g. ["personal_data", "health_data", "financial_data"]
    affected_data_subject_count INT DEFAULT 0,
    affected_systems JSONB DEFAULT '[]', -- e.g. ["crm", "email_server", "database"]

    -- Assessments & notifications (JSONB embedded objects)
    risk_assessment JSONB, -- {likelihood, impact, risk_level, assessed_at, assessed_by, notes}
    authority_notification JSONB, -- {status, deadline, submitted_at, authority_name, reference_number, contact_person, notes}
    data_subject_notification JSONB, -- {required, status, sent_at, affected_count, notification_text, channel}

    -- Resolution
    root_cause TEXT,
    lessons_learned TEXT,

    -- Timeline (JSONB array of events)
    timeline JSONB DEFAULT '[]', -- [{timestamp, action, user_id, details}, ...]

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    closed_at TIMESTAMPTZ
);
|
||||||
|
|
||||||
|
-- Incident measures table (corrective and preventive measures)
CREATE TABLE IF NOT EXISTS incident_measures (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    incident_id UUID NOT NULL REFERENCES incident_incidents(id) ON DELETE CASCADE,

    -- Measure info
    title VARCHAR(255) NOT NULL,
    description TEXT,
    measure_type VARCHAR(50) NOT NULL, -- immediate, long_term
    status VARCHAR(50) DEFAULT 'planned', -- planned, in_progress, completed
    responsible VARCHAR(255),
    due_date TIMESTAMPTZ,
    completed_at TIMESTAMPTZ,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Indexes
-- ============================================================================

-- Incident indexes
CREATE INDEX IF NOT EXISTS idx_incident_incidents_tenant ON incident_incidents(tenant_id);
CREATE INDEX IF NOT EXISTS idx_incident_incidents_status ON incident_incidents(tenant_id, status);
CREATE INDEX IF NOT EXISTS idx_incident_incidents_severity ON incident_incidents(tenant_id, severity);
CREATE INDEX IF NOT EXISTS idx_incident_incidents_detected_at ON incident_incidents(detected_at DESC);
CREATE INDEX IF NOT EXISTS idx_incident_incidents_category ON incident_incidents(tenant_id, category);

-- Measure indexes
CREATE INDEX IF NOT EXISTS idx_incident_measures_incident ON incident_measures(incident_id);
CREATE INDEX IF NOT EXISTS idx_incident_measures_status ON incident_measures(incident_id, status);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Triggers
-- ============================================================================

-- Reuse existing update_updated_at_column function

-- Incidents trigger
DROP TRIGGER IF EXISTS update_incident_incidents_updated_at ON incident_incidents;
CREATE TRIGGER update_incident_incidents_updated_at
    BEFORE UPDATE ON incident_incidents
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
-- ============================================================================
-- Comments
-- ============================================================================

COMMENT ON TABLE incident_incidents IS 'Security and data breach incidents per DSGVO Art. 33/34';
COMMENT ON TABLE incident_measures IS 'Corrective and preventive measures for incidents';

COMMENT ON COLUMN incident_incidents.detected_at IS 'When the incident was first detected - starts the 72h Art. 33 notification clock';
COMMENT ON COLUMN incident_incidents.authority_notification IS 'JSONB: Supervisory authority notification tracking per DSGVO Art. 33 (72h deadline)';
COMMENT ON COLUMN incident_incidents.data_subject_notification IS 'JSONB: Data subject notification tracking per DSGVO Art. 34';
COMMENT ON COLUMN incident_incidents.risk_assessment IS 'JSONB: Risk assessment with likelihood, impact, and auto-calculated risk level';
COMMENT ON COLUMN incident_incidents.timeline IS 'JSONB array: Chronological record of all actions taken during incident response';
COMMENT ON COLUMN incident_incidents.affected_data_categories IS 'JSONB array: Categories of personal data affected (e.g. health, financial)';
COMMENT ON COLUMN incident_incidents.affected_systems IS 'JSONB array: Systems affected by the incident';
COMMENT ON COLUMN incident_measures.measure_type IS 'immediate = short-term containment, long_term = preventive/structural fix';
|
||||||
356
ai-compliance-sdk/migrations/011_vendor_compliance_schema.sql
Normal file
356
ai-compliance-sdk/migrations/011_vendor_compliance_schema.sql
Normal file
@@ -0,0 +1,356 @@
|
|||||||
|
-- ============================================================================
-- Migration 011: Vendor Compliance Schema
-- Vendor Management, Contract/AVV Management, Findings, Templates
--
-- Implements DSGVO Art. 28 (Auftragsverarbeitung) requirements:
-- - Vendor registry with risk scoring and classification
-- - Contract/AVV document management with AI-assisted review
-- - Compliance findings from contract analysis
-- - Control instances for vendor-level control assessments
-- - Pre-filled templates for vendors, processing activities, and TOMs
-- ============================================================================

-- ============================================================================
-- Vendors (Service Provider Registry)
-- ============================================================================

CREATE TABLE IF NOT EXISTS vendor_vendors (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,

    -- Basic info
    name VARCHAR(255) NOT NULL,
    legal_form VARCHAR(100),
    country VARCHAR(10) NOT NULL DEFAULT 'DE', -- ISO 3166-1 alpha-2
    address JSONB, -- {street, city, postalCode, country, state}
    website VARCHAR(500),

    -- Contact
    contact_name VARCHAR(255),
    contact_email VARCHAR(255),
    contact_phone VARCHAR(100),
    contact_department VARCHAR(255),

    -- Role & Classification
    role VARCHAR(50) NOT NULL DEFAULT 'PROCESSOR', -- PROCESSOR, CONTROLLER, JOINT_CONTROLLER, SUB_PROCESSOR, THIRD_PARTY
    service_category VARCHAR(50), -- HOSTING, CRM, ERP, ANALYTICS, etc. (19 categories)
    service_description TEXT,
    data_access_level VARCHAR(50) DEFAULT 'NONE', -- NONE, POTENTIAL, ADMINISTRATIVE, CONTENT

    -- Processing & Compliance
    processing_locations JSONB DEFAULT '[]', -- [{country, region, isPrimary, isEU, isAdequate}]
    certifications JSONB DEFAULT '[]', -- ["ISO 27001", "SOC 2", etc.]

    -- Risk Scoring (0-100)
    inherent_risk_score INT DEFAULT 0,
    residual_risk_score INT DEFAULT 0,
    manual_risk_adjustment INT,

    -- Contract & Review
    review_frequency VARCHAR(50) DEFAULT 'ANNUAL', -- QUARTERLY, SEMI_ANNUAL, ANNUAL, BIENNIAL
    last_review_date TIMESTAMPTZ,
    next_review_date TIMESTAMPTZ,

    -- Links
    processing_activity_ids JSONB DEFAULT '[]', -- UUIDs of linked processing activities

    -- Status
    status VARCHAR(50) DEFAULT 'ACTIVE', -- ACTIVE, INACTIVE, PENDING_REVIEW, TERMINATED

    -- Template reference (if created from template)
    template_id VARCHAR(100),

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by UUID NOT NULL
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Contracts (including AVV/DPA)
-- ============================================================================

CREATE TABLE IF NOT EXISTS vendor_contracts (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
    vendor_id UUID NOT NULL REFERENCES vendor_vendors(id) ON DELETE CASCADE,

    -- Document info
    file_name VARCHAR(500),
    original_name VARCHAR(500),
    mime_type VARCHAR(100),
    file_size BIGINT,
    storage_path VARCHAR(1000), -- MinIO path

    -- Classification
    document_type VARCHAR(50) NOT NULL, -- AVV, MSA, SLA, SCC, NDA, TOM_ANNEX, CERTIFICATION, SUB_PROCESSOR_LIST

    -- Metadata (extracted or manual)
    parties JSONB, -- [{name, role, address}]
    effective_date DATE,
    expiration_date DATE,
    auto_renewal BOOLEAN DEFAULT FALSE,
    renewal_notice_period VARCHAR(100),

    -- Review Status
    review_status VARCHAR(50) DEFAULT 'PENDING', -- PENDING, IN_PROGRESS, COMPLETED, FAILED
    review_completed_at TIMESTAMPTZ,
    compliance_score INT, -- 0-100

    -- Versioning (self-reference to the superseded contract version)
    version VARCHAR(50) DEFAULT '1.0',
    previous_version_id UUID REFERENCES vendor_contracts(id),

    -- Content (extracted text for analysis)
    extracted_text TEXT,
    page_count INT,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by UUID NOT NULL
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Findings (from contract reviews)
-- ============================================================================

CREATE TABLE IF NOT EXISTS vendor_findings (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
    contract_id UUID REFERENCES vendor_contracts(id) ON DELETE CASCADE,
    vendor_id UUID NOT NULL REFERENCES vendor_vendors(id) ON DELETE CASCADE,

    -- Classification
    finding_type VARCHAR(20) NOT NULL, -- OK, GAP, RISK, UNKNOWN
    category VARCHAR(50) NOT NULL, -- AVV_CONTENT, SUBPROCESSOR, INCIDENT, AUDIT_RIGHTS, DELETION, TOM, TRANSFER, LIABILITY, SLA, DATA_SUBJECT_RIGHTS, CONFIDENTIALITY, INSTRUCTION, TERMINATION, GENERAL
    severity VARCHAR(20) NOT NULL, -- LOW, MEDIUM, HIGH, CRITICAL

    -- Content
    title VARCHAR(500) NOT NULL,
    description TEXT,
    recommendation TEXT,

    -- Citations (from contract text)
    citations JSONB DEFAULT '[]', -- [{documentId, page, startChar, endChar, quotedText, quoteHash}]

    -- Workflow
    status VARCHAR(50) DEFAULT 'OPEN', -- OPEN, IN_PROGRESS, RESOLVED, ACCEPTED, FALSE_POSITIVE
    assignee VARCHAR(255),
    due_date DATE,
    resolution TEXT,
    resolved_at TIMESTAMPTZ,
    resolved_by UUID,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Control Instances (applied controls per vendor)
-- ============================================================================

CREATE TABLE IF NOT EXISTS vendor_control_instances (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES compliance_tenants(id) ON DELETE CASCADE,
    vendor_id UUID NOT NULL REFERENCES vendor_vendors(id) ON DELETE CASCADE,

    -- Control reference
    control_id VARCHAR(100) NOT NULL, -- e.g., VND-TRF-01
    control_domain VARCHAR(50), -- TRANSFER, AUDIT, DELETION, INCIDENT, SUBPROCESSOR, TOM, CONTRACT, DATA_SUBJECT, SECURITY, GOVERNANCE

    -- Assessment
    status VARCHAR(50) DEFAULT 'PLANNED', -- PASS, PARTIAL, FAIL, NOT_APPLICABLE, PLANNED
    evidence_ids JSONB DEFAULT '[]',
    notes TEXT,

    -- Timing
    last_assessed_at TIMESTAMPTZ,
    last_assessed_by UUID,
    next_assessment_date TIMESTAMPTZ,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Each control may be applied at most once per vendor per tenant.
    UNIQUE(tenant_id, vendor_id, control_id)
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Templates (pre-filled templates for various entity types)
-- ============================================================================

CREATE TABLE IF NOT EXISTS compliance_templates (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID REFERENCES compliance_tenants(id) ON DELETE CASCADE, -- NULL for system templates

    -- Template info
    template_type VARCHAR(50) NOT NULL, -- VENDOR, PROCESSING_ACTIVITY, TOM, CONTROL_SET
    template_id VARCHAR(100) NOT NULL UNIQUE, -- e.g., tpl-vendor-cloud-iaas
    category VARCHAR(100), -- HR, SALES, MARKETING, CLOUD_INFRASTRUCTURE, etc.

    -- Content (bilingual German/English)
    name_de VARCHAR(500) NOT NULL,
    name_en VARCHAR(500) NOT NULL,
    description_de TEXT,
    description_en TEXT,

    -- Template data (full template content as JSONB)
    template_data JSONB NOT NULL,

    -- Organization
    industry VARCHAR(100), -- IT, HEALTHCARE, FINANCE, MANUFACTURING, RETAIL, etc.
    tags JSONB DEFAULT '[]',

    -- Metadata
    is_system BOOLEAN DEFAULT FALSE, -- true = pre-installed, false = user-created
    is_active BOOLEAN DEFAULT TRUE,
    usage_count INT DEFAULT 0,

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Indexes: Vendors
-- ============================================================================

CREATE INDEX IF NOT EXISTS idx_vendor_vendors_tenant ON vendor_vendors(tenant_id);
CREATE INDEX IF NOT EXISTS idx_vendor_vendors_status ON vendor_vendors(tenant_id, status);
CREATE INDEX IF NOT EXISTS idx_vendor_vendors_role ON vendor_vendors(tenant_id, role);
CREATE INDEX IF NOT EXISTS idx_vendor_vendors_service_category ON vendor_vendors(tenant_id, service_category);
CREATE INDEX IF NOT EXISTS idx_vendor_vendors_next_review ON vendor_vendors(next_review_date)
    WHERE next_review_date IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_vendor_vendors_template_id ON vendor_vendors(template_id)
    WHERE template_id IS NOT NULL;

-- ============================================================================
-- Indexes: Contracts
-- ============================================================================

CREATE INDEX IF NOT EXISTS idx_vendor_contracts_tenant ON vendor_contracts(tenant_id);
CREATE INDEX IF NOT EXISTS idx_vendor_contracts_vendor ON vendor_contracts(vendor_id);
CREATE INDEX IF NOT EXISTS idx_vendor_contracts_document_type ON vendor_contracts(tenant_id, document_type);
CREATE INDEX IF NOT EXISTS idx_vendor_contracts_review_status ON vendor_contracts(tenant_id, review_status);
CREATE INDEX IF NOT EXISTS idx_vendor_contracts_expiration ON vendor_contracts(expiration_date)
    WHERE expiration_date IS NOT NULL;

-- ============================================================================
-- Indexes: Findings
-- ============================================================================

CREATE INDEX IF NOT EXISTS idx_vendor_findings_tenant ON vendor_findings(tenant_id);
CREATE INDEX IF NOT EXISTS idx_vendor_findings_vendor ON vendor_findings(vendor_id);
CREATE INDEX IF NOT EXISTS idx_vendor_findings_contract ON vendor_findings(contract_id)
    WHERE contract_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_vendor_findings_severity ON vendor_findings(tenant_id, severity);
CREATE INDEX IF NOT EXISTS idx_vendor_findings_status ON vendor_findings(tenant_id, status);
CREATE INDEX IF NOT EXISTS idx_vendor_findings_category ON vendor_findings(tenant_id, category);

-- ============================================================================
-- Indexes: Control Instances
-- ============================================================================

CREATE INDEX IF NOT EXISTS idx_vendor_control_instances_tenant ON vendor_control_instances(tenant_id);
CREATE INDEX IF NOT EXISTS idx_vendor_control_instances_vendor ON vendor_control_instances(vendor_id);
CREATE INDEX IF NOT EXISTS idx_vendor_control_instances_control_id ON vendor_control_instances(control_id);
CREATE INDEX IF NOT EXISTS idx_vendor_control_instances_status ON vendor_control_instances(tenant_id, status);

-- ============================================================================
-- Indexes: Templates
-- ============================================================================

-- NOTE: template_id already has an implicit unique index from its UNIQUE
-- constraint, so no additional explicit index is created for it (the
-- previous idx_compliance_templates_template_id was redundant).
CREATE INDEX IF NOT EXISTS idx_compliance_templates_type ON compliance_templates(template_type);
CREATE INDEX IF NOT EXISTS idx_compliance_templates_category ON compliance_templates(category)
    WHERE category IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_compliance_templates_industry ON compliance_templates(industry)
    WHERE industry IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_compliance_templates_system ON compliance_templates(is_system);
CREATE INDEX IF NOT EXISTS idx_compliance_templates_active ON compliance_templates(is_active);
|
||||||
|
|
||||||
|
-- ============================================================================
-- Triggers
-- ============================================================================

-- Reuse existing update_updated_at_column function

-- Vendors trigger
DROP TRIGGER IF EXISTS update_vendor_vendors_updated_at ON vendor_vendors;
CREATE TRIGGER update_vendor_vendors_updated_at
    BEFORE UPDATE ON vendor_vendors
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Contracts trigger
DROP TRIGGER IF EXISTS update_vendor_contracts_updated_at ON vendor_contracts;
CREATE TRIGGER update_vendor_contracts_updated_at
    BEFORE UPDATE ON vendor_contracts
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Findings trigger
DROP TRIGGER IF EXISTS update_vendor_findings_updated_at ON vendor_findings;
CREATE TRIGGER update_vendor_findings_updated_at
    BEFORE UPDATE ON vendor_findings
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Control instances trigger
DROP TRIGGER IF EXISTS update_vendor_control_instances_updated_at ON vendor_control_instances;
CREATE TRIGGER update_vendor_control_instances_updated_at
    BEFORE UPDATE ON vendor_control_instances
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Templates trigger
DROP TRIGGER IF EXISTS update_compliance_templates_updated_at ON compliance_templates;
CREATE TRIGGER update_compliance_templates_updated_at
    BEFORE UPDATE ON compliance_templates
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
-- ============================================================================
-- Comments
-- ============================================================================

-- Table comments
COMMENT ON TABLE vendor_vendors IS 'Service provider registry for vendor compliance management (DSGVO Art. 28)';
COMMENT ON TABLE vendor_contracts IS 'Contract and AVV/DPA document management with AI-assisted review';
COMMENT ON TABLE vendor_findings IS 'Compliance findings from contract reviews and vendor assessments';
COMMENT ON TABLE vendor_control_instances IS 'Applied controls per vendor with assessment tracking';
COMMENT ON TABLE compliance_templates IS 'Pre-filled templates for vendors, processing activities, TOMs, and control sets';

-- Vendor column comments
COMMENT ON COLUMN vendor_vendors.role IS 'DSGVO role: PROCESSOR (Art. 28), CONTROLLER, JOINT_CONTROLLER (Art. 26), SUB_PROCESSOR, THIRD_PARTY';
COMMENT ON COLUMN vendor_vendors.data_access_level IS 'Level of access to personal data: NONE, POTENTIAL, ADMINISTRATIVE, CONTENT';
COMMENT ON COLUMN vendor_vendors.processing_locations IS 'JSONB array: Data processing locations with EU/adequacy status for transfer assessment';
COMMENT ON COLUMN vendor_vendors.certifications IS 'JSONB array: Vendor certifications (ISO 27001, SOC 2, etc.)';
COMMENT ON COLUMN vendor_vendors.inherent_risk_score IS 'Risk score (0-100) before controls are applied';
COMMENT ON COLUMN vendor_vendors.residual_risk_score IS 'Risk score (0-100) after controls are applied';
COMMENT ON COLUMN vendor_vendors.review_frequency IS 'How often the vendor must be reviewed: QUARTERLY, SEMI_ANNUAL, ANNUAL, BIENNIAL';
COMMENT ON COLUMN vendor_vendors.processing_activity_ids IS 'JSONB array: UUIDs linking to dsgvo_processing_activities entries';
COMMENT ON COLUMN vendor_vendors.template_id IS 'Reference to compliance_templates.template_id if vendor was created from a template';

-- Contract column comments
COMMENT ON COLUMN vendor_contracts.document_type IS 'Contract type: AVV (Auftragsverarbeitungsvertrag), MSA, SLA, SCC (Standard Contractual Clauses), NDA, TOM_ANNEX, CERTIFICATION, SUB_PROCESSOR_LIST';
COMMENT ON COLUMN vendor_contracts.storage_path IS 'MinIO object storage path for the uploaded document';
|
||||||
|
COMMENT ON COLUMN vendor_contracts.compliance_score IS 'AI-assessed compliance score (0-100) from contract review';
|
||||||
|
COMMENT ON COLUMN vendor_contracts.extracted_text IS 'Full text extracted from the document for AI analysis';
|
||||||
|
COMMENT ON COLUMN vendor_contracts.previous_version_id IS 'Self-referencing FK for contract version history';
|
||||||
|
|
||||||
|
-- Finding column comments
|
||||||
|
COMMENT ON COLUMN vendor_findings.finding_type IS 'Classification: OK (compliant), GAP (missing clause), RISK (problematic clause), UNKNOWN (could not determine)';
|
||||||
|
COMMENT ON COLUMN vendor_findings.category IS 'DSGVO Art. 28 requirement category the finding relates to';
|
||||||
|
COMMENT ON COLUMN vendor_findings.citations IS 'JSONB array: References to specific contract text passages with page/character offsets';
|
||||||
|
COMMENT ON COLUMN vendor_findings.status IS 'Workflow status: OPEN, IN_PROGRESS, RESOLVED, ACCEPTED (risk accepted), FALSE_POSITIVE';
|
||||||
|
|
||||||
|
-- Control instance column comments
|
||||||
|
COMMENT ON COLUMN vendor_control_instances.control_id IS 'Control identifier (e.g., VND-TRF-01 for transfer controls)';
|
||||||
|
COMMENT ON COLUMN vendor_control_instances.control_domain IS 'Control domain: TRANSFER, AUDIT, DELETION, INCIDENT, SUBPROCESSOR, TOM, CONTRACT, DATA_SUBJECT, SECURITY, GOVERNANCE';
|
||||||
|
COMMENT ON COLUMN vendor_control_instances.status IS 'Assessment result: PASS, PARTIAL, FAIL, NOT_APPLICABLE, PLANNED';
|
||||||
|
COMMENT ON COLUMN vendor_control_instances.evidence_ids IS 'JSONB array: References to evidence documents or contract IDs';
|
||||||
|
|
||||||
|
-- Template column comments
|
||||||
|
COMMENT ON COLUMN compliance_templates.template_type IS 'Template category: VENDOR, PROCESSING_ACTIVITY, TOM, CONTROL_SET';
|
||||||
|
COMMENT ON COLUMN compliance_templates.template_id IS 'Human-readable unique identifier (e.g., tpl-vendor-cloud-iaas)';
|
||||||
|
COMMENT ON COLUMN compliance_templates.template_data IS 'JSONB: Full template content including all pre-filled fields';
|
||||||
|
COMMENT ON COLUMN compliance_templates.is_system IS 'true = pre-installed system template, false = user-created tenant template';
|
||||||
|
COMMENT ON COLUMN compliance_templates.usage_count IS 'Number of times this template has been used to create entities';
|
||||||
175
ai-compliance-sdk/migrations/013_dsb_portal_schema.sql
Normal file
175
ai-compliance-sdk/migrations/013_dsb_portal_schema.sql
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
-- ============================================================================
|
||||||
|
-- Migration 013: DSB-as-a-Service Portal Schema
|
||||||
|
-- Datenschutzbeauftragter (Data Protection Officer) Portal
|
||||||
|
--
|
||||||
|
-- Provides a portal for external DSBs to manage multiple client tenants,
|
||||||
|
-- track hours, manage tasks, and communicate.
|
||||||
|
--
|
||||||
|
-- Depends on: 001_rbac_schema.sql (compliance_tenants)
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- DSB Assignments: which DSB is assigned to which tenant
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS dsb_assignments (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
dsb_user_id UUID NOT NULL, -- the DSB user
|
||||||
|
tenant_id UUID NOT NULL REFERENCES compliance_tenants(id),
|
||||||
|
status VARCHAR(20) NOT NULL DEFAULT 'active', -- active, paused, terminated
|
||||||
|
contract_start DATE NOT NULL,
|
||||||
|
contract_end DATE,
|
||||||
|
monthly_hours_budget DECIMAL(5,1) DEFAULT 0,
|
||||||
|
notes TEXT DEFAULT '',
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
UNIQUE(dsb_user_id, tenant_id)
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- DSB Time Tracking
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS dsb_hours (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
assignment_id UUID NOT NULL REFERENCES dsb_assignments(id) ON DELETE CASCADE,
|
||||||
|
date DATE NOT NULL,
|
||||||
|
hours DECIMAL(4,1) NOT NULL,
|
||||||
|
category VARCHAR(50) NOT NULL, -- 'dsfa_review', 'consultation', 'audit', 'training', 'incident_response', 'documentation', 'meeting', 'other'
|
||||||
|
description TEXT NOT NULL,
|
||||||
|
billable BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- DSB Tasks / Queue
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS dsb_tasks (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
assignment_id UUID NOT NULL REFERENCES dsb_assignments(id) ON DELETE CASCADE,
|
||||||
|
title VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT DEFAULT '',
|
||||||
|
category VARCHAR(50) NOT NULL, -- 'dsfa_review', 'dsr_response', 'incident_review', 'audit_preparation', 'policy_review', 'training', 'consultation', 'other'
|
||||||
|
priority VARCHAR(20) NOT NULL DEFAULT 'medium', -- low, medium, high, urgent
|
||||||
|
status VARCHAR(20) NOT NULL DEFAULT 'open', -- open, in_progress, waiting, completed, cancelled
|
||||||
|
due_date DATE,
|
||||||
|
completed_at TIMESTAMPTZ,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- DSB Communication Log
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS dsb_communications (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
assignment_id UUID NOT NULL REFERENCES dsb_assignments(id) ON DELETE CASCADE,
|
||||||
|
direction VARCHAR(10) NOT NULL, -- 'inbound', 'outbound'
|
||||||
|
channel VARCHAR(20) NOT NULL, -- 'email', 'phone', 'meeting', 'portal', 'letter'
|
||||||
|
subject VARCHAR(255) NOT NULL,
|
||||||
|
content TEXT DEFAULT '',
|
||||||
|
participants TEXT DEFAULT '',
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Indexes: DSB Assignments
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_assignments_dsb_user_id ON dsb_assignments(dsb_user_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_assignments_tenant_id ON dsb_assignments(tenant_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_assignments_status ON dsb_assignments(status);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Indexes: DSB Hours
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_hours_assignment_id ON dsb_hours(assignment_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_hours_date ON dsb_hours(date);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Indexes: DSB Tasks
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_tasks_assignment_id ON dsb_tasks(assignment_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_tasks_status ON dsb_tasks(status);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_tasks_priority ON dsb_tasks(priority);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_tasks_due_date ON dsb_tasks(due_date);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Indexes: DSB Communications
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_dsb_communications_assignment_id ON dsb_communications(assignment_id);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Triggers
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- Ensure update_updated_at_column() function exists (created in earlier migrations)
|
||||||
|
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
NEW.updated_at = NOW();
|
||||||
|
RETURN NEW;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- DSB Assignments trigger
|
||||||
|
DROP TRIGGER IF EXISTS update_dsb_assignments_updated_at ON dsb_assignments;
|
||||||
|
CREATE TRIGGER update_dsb_assignments_updated_at
|
||||||
|
BEFORE UPDATE ON dsb_assignments
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
-- DSB Tasks trigger
|
||||||
|
DROP TRIGGER IF EXISTS update_dsb_tasks_updated_at ON dsb_tasks;
|
||||||
|
CREATE TRIGGER update_dsb_tasks_updated_at
|
||||||
|
BEFORE UPDATE ON dsb_tasks
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Comments
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- Table comments
|
||||||
|
COMMENT ON TABLE dsb_assignments IS 'DSB-as-a-Service: Maps external Data Protection Officers (DSBs) to client tenants with contract details and hour budgets';
|
||||||
|
COMMENT ON TABLE dsb_hours IS 'DSB-as-a-Service: Time tracking entries for DSB work on assigned tenants, categorized and billable';
|
||||||
|
COMMENT ON TABLE dsb_tasks IS 'DSB-as-a-Service: Task queue for DSB work items per tenant assignment with priority and status tracking';
|
||||||
|
COMMENT ON TABLE dsb_communications IS 'DSB-as-a-Service: Communication log between DSB and client tenant, tracking direction, channel, and content';
|
||||||
|
|
||||||
|
-- DSB Assignments column comments
|
||||||
|
COMMENT ON COLUMN dsb_assignments.dsb_user_id IS 'UUID of the Data Protection Officer user account';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.tenant_id IS 'UUID of the client tenant this DSB is assigned to';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.status IS 'Assignment status: active, paused, or terminated';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.contract_start IS 'Start date of the DSB service contract';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.contract_end IS 'End date of the DSB service contract (NULL for open-ended)';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.monthly_hours_budget IS 'Monthly hour budget allocated for this tenant';
|
||||||
|
COMMENT ON COLUMN dsb_assignments.notes IS 'Internal notes about the assignment';
|
||||||
|
|
||||||
|
-- DSB Hours column comments
|
||||||
|
COMMENT ON COLUMN dsb_hours.assignment_id IS 'Reference to the DSB assignment this time entry belongs to';
|
||||||
|
COMMENT ON COLUMN dsb_hours.date IS 'Date the work was performed';
|
||||||
|
COMMENT ON COLUMN dsb_hours.hours IS 'Number of hours worked (e.g. 1.5)';
|
||||||
|
COMMENT ON COLUMN dsb_hours.category IS 'Work category: dsfa_review, consultation, audit, training, incident_response, documentation, meeting, other';
|
||||||
|
COMMENT ON COLUMN dsb_hours.description IS 'Description of work performed';
|
||||||
|
COMMENT ON COLUMN dsb_hours.billable IS 'Whether this time entry is billable to the client';
|
||||||
|
|
||||||
|
-- DSB Tasks column comments
|
||||||
|
COMMENT ON COLUMN dsb_tasks.assignment_id IS 'Reference to the DSB assignment this task belongs to';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.title IS 'Short title describing the task';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.description IS 'Detailed task description';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.category IS 'Task category: dsfa_review, dsr_response, incident_review, audit_preparation, policy_review, training, consultation, other';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.priority IS 'Task priority: low, medium, high, urgent';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.status IS 'Task status: open, in_progress, waiting, completed, cancelled';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.due_date IS 'Due date for the task (NULL if no deadline)';
|
||||||
|
COMMENT ON COLUMN dsb_tasks.completed_at IS 'Timestamp when the task was completed';
|
||||||
|
|
||||||
|
-- DSB Communications column comments
|
||||||
|
COMMENT ON COLUMN dsb_communications.assignment_id IS 'Reference to the DSB assignment this communication belongs to';
|
||||||
|
COMMENT ON COLUMN dsb_communications.direction IS 'Communication direction: inbound (from client) or outbound (from DSB)';
|
||||||
|
COMMENT ON COLUMN dsb_communications.channel IS 'Communication channel: email, phone, meeting, portal, letter';
|
||||||
|
COMMENT ON COLUMN dsb_communications.subject IS 'Subject line or topic of the communication';
|
||||||
|
COMMENT ON COLUMN dsb_communications.content IS 'Full content or summary of the communication';
|
||||||
|
COMMENT ON COLUMN dsb_communications.participants IS 'Comma-separated list of participants';
|
||||||
769
developer-portal/app/development/byoeh/page.tsx
Normal file
769
developer-portal/app/development/byoeh/page.tsx
Normal file
@@ -0,0 +1,769 @@
|
|||||||
|
import { DevPortalLayout, CodeBlock, InfoBox } from '@/components/DevPortalLayout'
|
||||||
|
|
||||||
|
export default function BYOEHDocsPage() {
|
||||||
|
return (
|
||||||
|
<DevPortalLayout
|
||||||
|
title="Namespace-Technologie fuer Geschaeftskunden"
|
||||||
|
description="Wie das SDK sensible Daten anonymisiert, verschluesselt und sicher in der Cloud verarbeiten laesst -- ohne dass der Betreiber Zugriff auf Klartext hat."
|
||||||
|
>
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 1. EINLEITUNG */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="einfuehrung">1. Was ist die Namespace-Technologie?</h2>
|
||||||
|
<p>
|
||||||
|
Unsere <strong>Namespace-Technologie</strong> (intern BYOEH -- Bring Your Own Expectation Horizon)
|
||||||
|
ist eine Privacy-First-Architektur, die es Geschaeftskunden ermoeglicht, <strong>sensible Daten
|
||||||
|
anonym und verschluesselt</strong> von KI-Services in der Cloud verarbeiten zu lassen -- ohne dass
|
||||||
|
personenbezogene Informationen jemals den Client verlassen.
|
||||||
|
</p>
|
||||||
|
<blockquote>
|
||||||
|
<em>“Daten gehen pseudonymisiert und verschluesselt in die Cloud, werden dort
|
||||||
|
von KI verarbeitet, und kommen verarbeitet zurueck. Nur der Kunde kann die Ergebnisse
|
||||||
|
wieder den Originaldaten zuordnen -- denn nur sein System hat den Schluessel dafuer.”</em>
|
||||||
|
</blockquote>
|
||||||
|
<p>
|
||||||
|
Das SDK loest ein grundlegendes Problem fuer Unternehmen: <strong>KI-gestuetzte
|
||||||
|
Datenverarbeitung ohne Datenschutzrisiko</strong>. Die Architektur basiert auf vier Bausteinen:
|
||||||
|
</p>
|
||||||
|
<ol>
|
||||||
|
<li>
|
||||||
|
<strong>Pseudonymisierung:</strong> Personenbezogene Daten werden durch zufaellige
|
||||||
|
Tokens ersetzt. Nur der Kunde kennt die Zuordnung.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Client-seitige Verschluesselung:</strong> Alle Daten werden <em>auf dem System
|
||||||
|
des Kunden</em> verschluesselt, bevor sie die Infrastruktur verlassen. Der Cloud-Server
|
||||||
|
sieht nur verschluesselte Blobs.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Namespace-Isolation:</strong> Jeder Kunde erhaelt einen eigenen, vollstaendig
|
||||||
|
abgeschotteten Namespace. Kein Kunde kann auf Daten eines anderen zugreifen.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>KI-Verarbeitung in der Cloud:</strong> Die KI arbeitet mit den pseudonymisierten
|
||||||
|
Daten und den vom Kunden bereitgestellten Referenzdokumenten. Ergebnisse gehen zurueck
|
||||||
|
an den Kunden zur lokalen Entschluesselung und Re-Identifizierung.
|
||||||
|
</li>
|
||||||
|
</ol>
|
||||||
|
|
||||||
|
<InfoBox type="info" title="Kern-Designprinzip: Operator Blindness">
|
||||||
|
<strong>Breakpilot kann die Kundendaten nicht lesen.</strong> Der Server sieht nur
|
||||||
|
verschluesselte Blobs und einen Schluessel-Hash (nicht den Schluessel selbst). Die
|
||||||
|
Passphrase zum Entschluesseln existiert <em>ausschliesslich</em> auf dem System des Kunden
|
||||||
|
und wird niemals uebertragen. Selbst ein Angriff auf die Cloud-Infrastruktur wuerde keine
|
||||||
|
Klartextdaten preisgeben.
|
||||||
|
</InfoBox>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 2. ANWENDUNGSFAELLE */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="anwendungsfaelle">2. Typische Anwendungsfaelle</h2>
|
||||||
|
<p>
|
||||||
|
Die Namespace-Technologie ist ueberall einsetzbar, wo sensible Daten von einer KI
|
||||||
|
verarbeitet werden sollen, ohne den Datenschutz zu gefaehrden:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="not-prose my-6 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Branche</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Anwendungsfall</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Sensible Daten</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Bildung</td>
|
||||||
|
<td className="px-4 py-3">KI-gestuetzte Klausurkorrektur</td>
|
||||||
|
<td className="px-4 py-3">Schuelernamen, Noten, Leistungsdaten</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Gesundheitswesen</td>
|
||||||
|
<td className="px-4 py-3">Medizinische Befundanalyse</td>
|
||||||
|
<td className="px-4 py-3">Patientennamen, Diagnosen, Befunde</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Recht</td>
|
||||||
|
<td className="px-4 py-3">Vertragsanalyse, Due Diligence</td>
|
||||||
|
<td className="px-4 py-3">Mandantendaten, Vertragsinhalte</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Personalwesen</td>
|
||||||
|
<td className="px-4 py-3">Bewerbungsscreening, Zeugnisanalyse</td>
|
||||||
|
<td className="px-4 py-3">Bewerberdaten, Gehaltsinformationen</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Finanzwesen</td>
|
||||||
|
<td className="px-4 py-3">Dokumentenpruefung, Compliance-Checks</td>
|
||||||
|
<td className="px-4 py-3">Kontodaten, Transaktionen, Identitaeten</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 3. DER KOMPLETTE ABLAUF */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="ablauf">3. Der komplette Ablauf im Ueberblick</h2>
|
||||||
|
<p>
|
||||||
|
Der Prozess laesst sich in sieben Schritte unterteilen. Die gesamte
|
||||||
|
Pseudonymisierung und Verschluesselung geschieht auf dem System des Kunden,
|
||||||
|
bevor Daten in die Cloud gesendet werden:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<CodeBlock language="text" filename="Workflow: Vom Quelldokument zur KI-verarbeiteten Ausgabe">
|
||||||
|
{`SCHRITT 1: DOKUMENTE ERFASSEN & PSEUDONYMISIEREN
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
SDK empfaengt Dokumente (PDF, Bild, Text)
|
||||||
|
→ Personenbezogene Daten werden erkannt (Header, Namen, IDs)
|
||||||
|
→ PII wird durch zufaellige Tokens ersetzt (doc_token, UUID4)
|
||||||
|
→ Zuordnung "Token → Originalname" wird lokal gesichert
|
||||||
|
|
||||||
|
SCHRITT 2: CLIENT-SEITIGE VERSCHLUESSELUNG
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
Kunde konfiguriert eine Passphrase im SDK
|
||||||
|
→ SDK leitet daraus einen 256-Bit-Schluessel ab (PBKDF2, 100k Runden)
|
||||||
|
→ Dokumente werden mit AES-256-GCM verschluesselt
|
||||||
|
→ Nur der Hash des Schluessels wird an den Server gesendet
|
||||||
|
→ Passphrase und Schluessel verlassen NIEMALS das Kundensystem
|
||||||
|
|
||||||
|
SCHRITT 3: IDENTITAETS-MAP SICHERN
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
Die Zuordnung "Token → Originaldaten" wird verschluesselt gespeichert:
|
||||||
|
→ Nur mit der Passphrase des Kunden rekonstruierbar
|
||||||
|
→ Ohne Passphrase ist keine Re-Identifizierung moeglich
|
||||||
|
|
||||||
|
SCHRITT 4: UPLOAD IN DEN KUNDEN-NAMESPACE
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
Verschluesselte Dateien gehen in den isolierten Namespace:
|
||||||
|
→ Jeder Kunde hat eine eigene tenant_id
|
||||||
|
→ Daten werden in MinIO (Storage) + Qdrant (Vektoren) gespeichert
|
||||||
|
→ Server sieht: verschluesselter Blob + Schluessel-Hash + Salt
|
||||||
|
|
||||||
|
SCHRITT 5: KI-VERARBEITUNG IN DER CLOUD
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
KI verarbeitet die pseudonymisierten Daten:
|
||||||
|
→ RAG-System durchsucht Referenzdokumente des Kunden
|
||||||
|
→ KI generiert Ergebnisse basierend auf Kundenkontext
|
||||||
|
→ Ergebnisse sind an den Namespace gebunden
|
||||||
|
|
||||||
|
SCHRITT 6: ERGEBNISSE ZURUECK
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
KI-Ergebnisse gehen an das Kundensystem:
|
||||||
|
→ SDK entschluesselt die Ergebnisse mit der Passphrase
|
||||||
|
→ Kunde sieht aufbereitete Ergebnisse im Klartext
|
||||||
|
|
||||||
|
SCHRITT 7: RE-IDENTIFIZIERUNG & FINALISIERUNG
|
||||||
|
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||||
|
Kunde ordnet Ergebnisse den Originaldaten zu:
|
||||||
|
→ Identitaets-Map wird entschluesselt
|
||||||
|
→ Tokens werden wieder den echten Datensaetzen zugeordnet
|
||||||
|
→ Fertige Ergebnisse stehen im Originalsystem bereit`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 4. SDK-INTEGRATION */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="sdk-integration">4. SDK-Integration</h2>
|
||||||
|
<p>
|
||||||
|
Die Integration in bestehende Systeme erfolgt ueber unser SDK. Nachfolgend ein
|
||||||
|
vereinfachtes Beispiel, wie ein Kunde das SDK nutzt:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<CodeBlock language="typescript" filename="Beispiel: SDK-Integration (TypeScript)">
|
||||||
|
{`import { BreakpilotSDK, NamespaceClient } from '@breakpilot/compliance-sdk'
|
||||||
|
|
||||||
|
// 1. SDK initialisieren mit API-Key
|
||||||
|
const sdk = new BreakpilotSDK({
|
||||||
|
apiKey: process.env.BREAKPILOT_API_KEY,
|
||||||
|
endpoint: 'https://api.breakpilot.de'
|
||||||
|
})
|
||||||
|
|
||||||
|
// 2. Namespace-Client erstellen (pro Mandant/Abteilung)
|
||||||
|
const namespace = sdk.createNamespace({
|
||||||
|
tenantId: 'kunde-firma-abc',
|
||||||
|
passphrase: process.env.ENCRYPTION_PASSPHRASE // Bleibt lokal!
|
||||||
|
})
|
||||||
|
|
||||||
|
// 3. Dokument pseudonymisieren & verschluesselt hochladen
|
||||||
|
const result = await namespace.upload({
|
||||||
|
file: documentBuffer,
|
||||||
|
metadata: { type: 'vertrag', category: 'due-diligence' },
|
||||||
|
pseudonymize: true, // PII automatisch ersetzen
|
||||||
|
headerRedaction: true // Kopfbereich entfernen
|
||||||
|
})
|
||||||
|
// result.docToken = "a7f3c2d1-4e9b-4a5f-8c7d-..."
|
||||||
|
|
||||||
|
// 4. Referenzdokument hochladen (z.B. Pruefkriterien)
|
||||||
|
await namespace.uploadReference({
|
||||||
|
file: referenceBuffer,
|
||||||
|
title: 'Pruefkriterien Vertrag Typ A'
|
||||||
|
})
|
||||||
|
|
||||||
|
// 5. KI-Verarbeitung anstossen
|
||||||
|
const analysis = await namespace.analyze({
|
||||||
|
docToken: result.docToken,
|
||||||
|
prompt: 'Pruefe den Vertrag gegen die Referenzkriterien',
|
||||||
|
useRAG: true
|
||||||
|
})
|
||||||
|
|
||||||
|
// 6. Ergebnisse entschluesseln (passiert automatisch im SDK)
|
||||||
|
console.log(analysis.findings) // Klartext-Ergebnisse
|
||||||
|
console.log(analysis.score) // Bewertung
|
||||||
|
|
||||||
|
// 7. Re-Identifizierung (Token → Originalname)
|
||||||
|
const identityMap = await namespace.getIdentityMap()
|
||||||
|
const originalName = identityMap[result.docToken]`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<InfoBox type="success" title="Zero-Knowledge-Architektur">
|
||||||
|
Die Passphrase verlässt niemals das System des Kunden. Das SDK verschluesselt
|
||||||
|
und entschluesselt <strong>ausschliesslich lokal</strong>. Breakpilot hat zu keinem
|
||||||
|
Zeitpunkt Zugriff auf Klartextdaten oder den Verschluesselungsschluessel.
|
||||||
|
</InfoBox>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 5. PSEUDONYMISIERUNG */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="pseudonymisierung">5. Pseudonymisierung: Wie personenbezogene Daten entfernt werden</h2>
|
||||||
|
<p>
|
||||||
|
Pseudonymisierung bedeutet: personenbezogene Daten werden durch <strong>zufaellige
|
||||||
|
Tokens</strong> ersetzt, sodass ohne Zusatzinformation kein Rueckschluss auf die Person
|
||||||
|
moeglich ist. Das SDK bietet zwei Mechanismen:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>5.1 Der doc_token: Ein zufaelliger Identifikator</h3>
|
||||||
|
<p>
|
||||||
|
Jedes Dokument erhaelt einen <strong>doc_token</strong> -- einen 128-Bit-Zufallscode im
|
||||||
|
UUID4-Format (z.B. <code>a7f3c2d1-4e9b-4a5f-8c7d-6b2e1f0a9d3c</code>). Dieser Token:
|
||||||
|
</p>
|
||||||
|
<ul>
|
||||||
|
<li>Ist <strong>kryptographisch zufaellig</strong> -- es gibt keinen Zusammenhang zwischen
|
||||||
|
Token und Originaldatensatz</li>
|
||||||
|
<li>Kann <strong>nicht zurueckgerechnet</strong> werden -- auch mit Kenntnis des Algorithmus
|
||||||
|
ist kein Rueckschluss moeglich</li>
|
||||||
|
<li>Dient als <strong>eindeutiger Schluessel</strong>, um Ergebnisse spaeter dem
|
||||||
|
Originaldokument zuzuordnen</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
<h3>5.2 Header-Redaction: PII wird entfernt</h3>
|
||||||
|
<p>
|
||||||
|
Bei Dokumenten mit erkennbarem Kopfbereich (Namen, Adressen, IDs) kann das SDK diesen
|
||||||
|
Bereich <strong>automatisch entfernen</strong>. Die Entfernung ist <strong>permanent</strong>:
|
||||||
|
Die Originaldaten werden nicht an den Server uebermittelt.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="not-prose my-6 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Methode</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Wie es funktioniert</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Wann verwenden</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Einfache Redaction</td>
|
||||||
|
<td className="px-4 py-3">Definierter Bereich des Dokuments wird entfernt</td>
|
||||||
|
<td className="px-4 py-3">Standardisierte Formulare mit festem Layout</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Smarte Redaction</td>
|
||||||
|
<td className="px-4 py-3">OpenCV/NER erkennt Textbereiche mit PII und entfernt gezielt</td>
|
||||||
|
<td className="px-4 py-3">Freitext-Dokumente, variable Layouts</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<h3>5.3 Die Identitaets-Map: Nur der Kunde kennt die Zuordnung</h3>
|
||||||
|
<p>
|
||||||
|
Die Zuordnung <em>doc_token → Originaldaten</em> wird als <strong>verschluesselte Tabelle</strong>
|
||||||
|
gespeichert. Das Datenmodell sieht vereinfacht so aus:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<CodeBlock language="text" filename="Datenmodell: Namespace-Session (vereinfacht)">
|
||||||
|
{`NamespaceSession
|
||||||
|
├── tenant_id = "kunde-firma-abc" ← Pflichtfeld (Isolation)
|
||||||
|
├── encrypted_identity_map = [verschluesselte Bytes] ← Nur mit Passphrase lesbar
|
||||||
|
├── identity_map_iv = "a3f2c1..." ← Initialisierungsvektor (fuer AES)
|
||||||
|
│
|
||||||
|
└── PseudonymizedDocument (pro Dokument)
|
||||||
|
├── doc_token = "a7f3c2d1-..." ← Zufaelliger Token (Primary Key)
|
||||||
|
├── session_id = [Referenz]
|
||||||
|
└── (Kein Name, keine personenbezogenen Daten)`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<InfoBox type="success" title="DSGVO Art. 4 Nr. 5 konform">
|
||||||
|
Die Pseudonymisierung erfuellt die Definition der DSGVO: Personenbezogene Daten
|
||||||
|
koennen <strong>ohne Hinzuziehung zusaetzlicher Informationen</strong>
|
||||||
|
(der verschluesselten Identitaets-Map + der Passphrase des Kunden) nicht mehr einer
|
||||||
|
bestimmten Person zugeordnet werden.
|
||||||
|
</InfoBox>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 6. VERSCHLUESSELUNG */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="verschluesselung">6. Ende-zu-Ende-Verschluesselung</h2>
|
||||||
|
<p>
|
||||||
|
Die Verschluesselung ist das Herzstueck des Datenschutzes. Sie findet <strong>vollstaendig
|
||||||
|
auf dem System des Kunden</strong> statt -- der Cloud-Server bekommt nur verschluesselte
|
||||||
|
Daten zu sehen.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>6.1 Der Verschluesselungsvorgang</h3>
|
||||||
|
|
||||||
|
<CodeBlock language="text" filename="Client-seitige Verschluesselung (im SDK)">
|
||||||
|
{`┌─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ System des Kunden (SDK) │
|
||||||
|
├─────────────────────────────────────────────────────────────────┤
|
||||||
|
│ │
|
||||||
|
│ 1. Kunde konfiguriert Passphrase im SDK │
|
||||||
|
│ │ ↑ │
|
||||||
|
│ │ │ Passphrase bleibt hier -- wird NIE gesendet │
|
||||||
|
│ ▼ │
|
||||||
|
│ 2. Schluessel-Ableitung: │
|
||||||
|
│ PBKDF2-SHA256(Passphrase, zufaelliger Salt, 100.000 Runden) │
|
||||||
|
│ │ │
|
||||||
|
│ │ → Ergebnis: 256-Bit-Schluessel (32 Bytes) │
|
||||||
|
│ │ → 100.000 Runden machen Brute-Force unpraktikabel │
|
||||||
|
│ ▼ │
|
||||||
|
│ 3. Verschluesselung: │
|
||||||
|
│ AES-256-GCM(Schluessel, zufaelliger IV, Dokument) │
|
||||||
|
│ │ │
|
||||||
|
│ │ → AES-256: Militaerstandard, 2^256 moegliche Schluessel │
|
||||||
|
│ │ → GCM: Garantiert Integritaet (Manipulation erkennbar) │
|
||||||
|
│ ▼ │
|
||||||
|
│ 4. Schluessel-Hash: │
|
||||||
|
│ SHA-256(abgeleiteter Schluessel) → Hash fuer Verifikation │
|
||||||
|
│ │ │
|
||||||
|
│ │ → Server speichert nur diesen Hash │
|
||||||
|
│ │ → Damit kann geprueft werden ob die Passphrase stimmt │
|
||||||
|
│ │ → Vom Hash kann der Schluessel NICHT zurueckberechnet │
|
||||||
|
│ │ werden │
|
||||||
|
│ ▼ │
|
||||||
|
│ 5. Upload: Nur diese Daten gehen an den Cloud-Server: │
|
||||||
|
│ • Verschluesselter Blob (unlesbar ohne Schluessel) │
|
||||||
|
│ • Salt (zufaellige Bytes, harmlos) │
|
||||||
|
│ • IV (Initialisierungsvektor, harmlos) │
|
||||||
|
│ • Schluessel-Hash (zur Verifikation, nicht umkehrbar) │
|
||||||
|
│ │
|
||||||
|
│ Was NICHT an den Server geht: │
|
||||||
|
│ ✗ Passphrase │
|
||||||
|
│ ✗ Abgeleiteter Schluessel │
|
||||||
|
│ ✗ Unverschluesselter Klartext │
|
||||||
|
└─────────────────────────────────────────────────────────────────┘`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<h3>6.2 Sicherheitsgarantien</h3>
|
||||||
|
<div className="not-prose my-6 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Angriffsszenario</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Was der Angreifer sieht</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Ergebnis</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Cloud-Server wird gehackt</td>
|
||||||
|
<td className="px-4 py-3">Verschluesselte Blobs + Hashes</td>
|
||||||
|
<td className="px-4 py-3 text-green-700 font-medium">Keine lesbaren Dokumente</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Datenbank wird geleakt</td>
|
||||||
|
<td className="px-4 py-3">encrypted_identity_map (verschluesselt)</td>
|
||||||
|
<td className="px-4 py-3 text-green-700 font-medium">Keine personenbezogenen Daten</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Netzwerkverkehr abgefangen</td>
|
||||||
|
<td className="px-4 py-3">Verschluesselte Daten (TLS + AES)</td>
|
||||||
|
<td className="px-4 py-3 text-green-700 font-medium">Doppelt verschluesselt</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Betreiber (Breakpilot) will mitlesen</td>
|
||||||
|
<td className="px-4 py-3">Verschluesselte Blobs, kein Schluessel</td>
|
||||||
|
<td className="px-4 py-3 text-green-700 font-medium">Operator Blindness</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Anderer Kunde versucht Zugriff</td>
|
||||||
|
<td className="px-4 py-3">Nichts (Tenant-Isolation)</td>
|
||||||
|
<td className="px-4 py-3 text-green-700 font-medium">Namespace blockiert</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 7. NAMESPACE / TENANT-ISOLATION */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="namespace">7. Namespace-Isolation: Jeder Kunde hat seinen eigenen Bereich</h2>
|
||||||
|
<p>
|
||||||
|
Ein <strong>Namespace</strong> (auch “Tenant” genannt) ist ein vollstaendig
|
||||||
|
abgeschotteter Bereich im System. Man kann es sich wie <strong>separate Tresorraeume
|
||||||
|
in einer Bank</strong> vorstellen: Jeder Kunde hat seinen eigenen Raum, und kein Schluessel
|
||||||
|
passt in einen anderen.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>7.1 Wie die Isolation funktioniert</h3>
|
||||||
|
<p>
|
||||||
|
Jeder Kunde erhaelt eine eindeutige <code>tenant_id</code>. Diese ID wird
|
||||||
|
bei <strong>jeder einzelnen Datenbankabfrage</strong> als Pflichtfilter verwendet:
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<CodeBlock language="text" filename="Tenant-Isolation in der Vektordatenbank (Qdrant)">
|
||||||
|
{`Kunde A (tenant_id: "firma-alpha-001")
|
||||||
|
├── Dokument 1 (verschluesselt)
|
||||||
|
├── Dokument 2 (verschluesselt)
|
||||||
|
└── Referenz: Pruefkriterien 2025
|
||||||
|
|
||||||
|
Kunde B (tenant_id: "firma-beta-002")
|
||||||
|
├── Dokument 1 (verschluesselt)
|
||||||
|
└── Referenz: Compliance-Vorgaben 2025
|
||||||
|
|
||||||
|
Suchanfrage von Kunde A:
|
||||||
|
"Welche Klauseln weichen von den Referenzkriterien ab?"
|
||||||
|
→ Suche NUR in tenant_id = "firma-alpha-001"
|
||||||
|
→ Kunde B's Daten sind UNSICHTBAR
|
||||||
|
|
||||||
|
Jede Qdrant-Query hat diesen Pflichtfilter:
|
||||||
|
must_conditions = [
|
||||||
|
FieldCondition(key="tenant_id", match="firma-alpha-001")
|
||||||
|
]
|
||||||
|
|
||||||
|
Es gibt KEINE Abfrage ohne tenant_id-Filter.`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<h3>7.2 Drei Ebenen der Isolation</h3>
|
||||||
|
<div className="not-prose my-6 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Ebene</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">System</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Isolation</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Dateisystem</td>
|
||||||
|
<td className="px-4 py-3">MinIO (S3-Storage)</td>
|
||||||
|
<td className="px-4 py-3">Eigener Bucket/Pfad pro Kunde: <code>/tenant-id/doc-id/encrypted.bin</code></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Vektordatenbank</td>
|
||||||
|
<td className="px-4 py-3">Qdrant</td>
|
||||||
|
<td className="px-4 py-3">Pflichtfilter <code>tenant_id</code> bei jeder Suche</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Metadaten-DB</td>
|
||||||
|
<td className="px-4 py-3">PostgreSQL</td>
|
||||||
|
<td className="px-4 py-3">Jede Tabelle hat <code>tenant_id</code> als Pflichtfeld</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<InfoBox type="warning" title="Kein Training mit Kundendaten">
|
||||||
|
Auf allen Vektoren in Qdrant ist das Flag <code>training_allowed: false</code> gesetzt.
|
||||||
|
Kundeninhalte werden <strong>ausschliesslich fuer RAG-Suchen</strong> innerhalb des
|
||||||
|
Kunden-Namespace verwendet und <strong>niemals zum Trainieren</strong> eines KI-Modells
|
||||||
|
eingesetzt.
|
||||||
|
</InfoBox>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 8. RAG-PIPELINE */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="rag-pipeline">8. RAG-Pipeline: KI-Verarbeitung mit Kundenkontext</h2>
|
||||||
|
<p>
|
||||||
|
Die KI nutzt die vom Kunden hochgeladenen Referenzdokumente als Wissensbasis.
|
||||||
|
Dieser Prozess heisst <strong>RAG (Retrieval Augmented Generation)</strong>:
|
||||||
|
Die KI “liest” zuerst die relevanten Referenzen und generiert dann
|
||||||
|
kontextbezogene Ergebnisse.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>8.1 Indexierung der Referenzdokumente</h3>
|
||||||
|
<CodeBlock language="text" filename="Indexierung: Vom Upload zum durchsuchbaren Referenzdokument">
|
||||||
|
{`Referenzdokument (verschluesselt auf Server)
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 1. Passphrase-Verifikation │ ← SDK sendet Schluessel-Hash
|
||||||
|
│ Hash pruefen │ Server vergleicht mit gespeichertem Hash
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 2. Entschluesselung │ ← Temporaer im Arbeitsspeicher
|
||||||
|
│ AES-256-GCM Decrypt │ (wird nach Verarbeitung geloescht)
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 3. Text-Extraktion │ ← PDF → Klartext
|
||||||
|
│ Tabellen, Listen, Absaetze │
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 4. Chunking │ ← Text in ~1.000-Zeichen-Abschnitte
|
||||||
|
│ Ueberlappung: 200 Zeichen │ (mit Ueberlappung fuer Kontexterhalt)
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 5. Embedding │ ← Jeder Abschnitt wird in einen
|
||||||
|
│ Text → 1.536 Zahlen │ Bedeutungsvektor umgewandelt
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 6. Re-Encryption │ ← Jeder Chunk wird ERNEUT verschluesselt
|
||||||
|
│ AES-256-GCM pro Chunk │ bevor er gespeichert wird
|
||||||
|
└──────────┬─────────────────────────┘
|
||||||
|
|
|
||||||
|
v
|
||||||
|
┌────────────────────────────────────┐
|
||||||
|
│ 7. Qdrant-Indexierung │ ← Vektor + verschluesselter Chunk
|
||||||
|
│ tenant_id: "firma-alpha-001" │ werden mit Tenant-Filter gespeichert
|
||||||
|
│ training_allowed: false │
|
||||||
|
└────────────────────────────────────┘`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<h3>8.2 Wie die KI eine Anfrage bearbeitet (RAG-Query)</h3>
|
||||||
|
<ol>
|
||||||
|
<li>
|
||||||
|
<strong>Anfrage formulieren:</strong> Das SDK sendet eine Suchanfrage mit dem
|
||||||
|
zu verarbeitenden Dokument und den gewuenschten Kriterien.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Semantische Suche:</strong> Die Anfrage wird in einen Vektor umgewandelt und
|
||||||
|
gegen die Referenz-Vektoren in Qdrant gesucht -- <em>nur im Namespace des Kunden</em>.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Entschluesselung:</strong> Die gefundenen Chunks werden mit der Passphrase
|
||||||
|
des Kunden entschluesselt.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>KI-Antwort:</strong> Die entschluesselten Referenzpassagen werden als Kontext
|
||||||
|
an die KI uebergeben, die daraus ein Ergebnis generiert.
|
||||||
|
</li>
|
||||||
|
</ol>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 9. KEY SHARING */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="key-sharing">9. Key Sharing: Zusammenarbeit ermoeglichen</h2>
|
||||||
|
<p>
|
||||||
|
In vielen Geschaeftsprozessen muessen mehrere Personen oder Abteilungen auf die gleichen
|
||||||
|
Daten zugreifen -- z.B. fuer Vier-Augen-Prinzip, Qualitaetskontrolle oder externe Audits.
|
||||||
|
Das Key-Sharing-System ermoeglicht es dem Eigentuemer, seinen Namespace sicher mit
|
||||||
|
anderen zu teilen.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>9.1 Einladungs-Workflow</h3>
|
||||||
|
<CodeBlock language="text" filename="Key Sharing: Sicheres Teilen zwischen Bearbeitern">
|
||||||
|
{`Eigentuemer Server Eingeladener
|
||||||
|
│ │ │
|
||||||
|
│ 1. Einladung senden │ │
|
||||||
|
│ (E-Mail + Rolle + Scope) │ │
|
||||||
|
│─────────────────────────────────▶ │
|
||||||
|
│ │ │
|
||||||
|
│ │ 2. Einladung erstellt │
|
||||||
|
│ │ (14 Tage gueltig) │
|
||||||
|
│ │ │
|
||||||
|
│ │ 3. Benachrichtigung ──────▶│
|
||||||
|
│ │ │
|
||||||
|
│ │ 4. Einladung annehmen
|
||||||
|
│ │◀─────────────────────────────│
|
||||||
|
│ │ │
|
||||||
|
│ │ 5. Key-Share erstellt │
|
||||||
|
│ │ (verschluesselte │
|
||||||
|
│ │ Passphrase) │
|
||||||
|
│ │ │
|
||||||
|
│ │ 6. Eingeladener kann ──────▶│
|
||||||
|
│ │ jetzt Daten im │
|
||||||
|
│ │ Namespace abfragen │
|
||||||
|
│ │ │
|
||||||
|
│ 7. Zugriff widerrufen │ │
|
||||||
|
│ (jederzeit moeglich) │ │
|
||||||
|
│─────────────────────────────────▶ │
|
||||||
|
│ │ Share deaktiviert │`}
|
||||||
|
</CodeBlock>
|
||||||
|
|
||||||
|
<h3>9.2 Rollen beim Key-Sharing</h3>
|
||||||
|
<div className="not-prose my-4 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Rolle</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Typischer Nutzer</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Rechte</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr><td className="px-4 py-3 font-medium">Owner</td><td className="px-4 py-3">Projektverantwortlicher</td><td className="px-4 py-3">Vollzugriff, kann teilen & widerrufen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">Reviewer</td><td className="px-4 py-3">Qualitaetssicherung</td><td className="px-4 py-3">Lesen, RAG-Queries, eigene Anmerkungen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">Auditor</td><td className="px-4 py-3">Externer Pruefer</td><td className="px-4 py-3">Nur Lesen (Aufsichtsfunktion)</td></tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 10. AUDIT-TRAIL */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="audit">10. Audit-Trail: Vollstaendige Nachvollziehbarkeit</h2>
|
||||||
|
<p>
|
||||||
|
Jede Aktion im Namespace wird revisionssicher im <strong>Audit-Log</strong> gespeichert.
|
||||||
|
Das ist essenziell fuer Compliance-Anforderungen und externe Audits.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<div className="not-prose my-4 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Event</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Was protokolliert wird</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr><td className="px-4 py-3 font-medium">upload</td><td className="px-4 py-3">Dokument hochgeladen (Dateigroesse, Metadaten, Zeitstempel)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">index</td><td className="px-4 py-3">Referenzdokument indexiert (Anzahl Chunks, Dauer)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">rag_query</td><td className="px-4 py-3">RAG-Suchanfrage ausgefuehrt (Query-Hash, Anzahl Ergebnisse)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">analyze</td><td className="px-4 py-3">KI-Verarbeitung gestartet (Dokument-Token, Modell, Dauer)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">share</td><td className="px-4 py-3">Namespace mit anderem Nutzer geteilt (Empfaenger, Rolle)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">revoke_share</td><td className="px-4 py-3">Zugriff widerrufen (wer, wann)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">decrypt</td><td className="px-4 py-3">Ergebnis entschluesselt (durch wen, Zeitstempel)</td></tr>
|
||||||
|
<tr><td className="px-4 py-3 font-medium">delete</td><td className="px-4 py-3">Dokument geloescht (Soft Delete, bleibt in Logs)</td></tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 11. API-ENDPUNKTE */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="api">11. API-Endpunkte (SDK-Referenz)</h2>
|
||||||
|
<p>
|
||||||
|
Die folgenden Endpunkte sind ueber das SDK oder direkt via REST ansprechbar.
|
||||||
|
Authentifizierung erfolgt ueber API-Key + JWT-Token.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h3>11.1 Namespace-Verwaltung</h3>
|
||||||
|
<div className="not-prose my-4 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Methode</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Endpunkt</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Beschreibung</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/upload</td><td className="px-4 py-3">Verschluesseltes Dokument hochladen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-green-100 text-green-800 text-xs font-bold">GET</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/documents</td><td className="px-4 py-3">Eigene Dokumente auflisten</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-green-100 text-green-800 text-xs font-bold">GET</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/documents/{'{id}'}</td><td className="px-4 py-3">Einzelnes Dokument abrufen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-red-100 text-red-800 text-xs font-bold">DELETE</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/documents/{'{id}'}</td><td className="px-4 py-3">Dokument loeschen (Soft Delete)</td></tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<h3>11.2 Referenzdokumente & RAG</h3>
|
||||||
|
<div className="not-prose my-4 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Methode</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Endpunkt</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Beschreibung</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/references/upload</td><td className="px-4 py-3">Referenzdokument hochladen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/references/{'{id}'}/index</td><td className="px-4 py-3">Referenz fuer RAG indexieren</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/rag-query</td><td className="px-4 py-3">RAG-Suchanfrage ausfuehren</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/analyze</td><td className="px-4 py-3">KI-Verarbeitung anstossen</td></tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<h3>11.3 Key Sharing</h3>
|
||||||
|
<div className="not-prose my-4 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Methode</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Endpunkt</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Beschreibung</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-blue-100 text-blue-800 text-xs font-bold">POST</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/share</td><td className="px-4 py-3">Namespace mit anderem Nutzer teilen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-green-100 text-green-800 text-xs font-bold">GET</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/shares</td><td className="px-4 py-3">Aktive Shares auflisten</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-red-100 text-red-800 text-xs font-bold">DELETE</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/shares/{'{shareId}'}</td><td className="px-4 py-3">Zugriff widerrufen</td></tr>
|
||||||
|
<tr><td className="px-4 py-3"><span className="px-2 py-0.5 rounded bg-green-100 text-green-800 text-xs font-bold">GET</span></td><td className="px-4 py-3 font-mono text-sm">/api/v1/namespace/shared-with-me</td><td className="px-4 py-3">Mit mir geteilte Namespaces</td></tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* ============================================================ */}
|
||||||
|
{/* 12. ZUSAMMENFASSUNG */}
|
||||||
|
{/* ============================================================ */}
|
||||||
|
<h2 id="zusammenfassung">12. Zusammenfassung: Compliance-Garantien</h2>
|
||||||
|
|
||||||
|
<div className="not-prose my-6 overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-gray-200 text-sm">
|
||||||
|
<thead className="bg-gray-50">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Garantie</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Wie umgesetzt</th>
|
||||||
|
<th className="px-4 py-3 text-left font-medium text-gray-500">Regelwerk</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-gray-200">
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Keine PII verlaesst das Kundensystem</td>
|
||||||
|
<td className="px-4 py-3">Header-Redaction + verschluesselte Identity-Map</td>
|
||||||
|
<td className="px-4 py-3">DSGVO Art. 4 Nr. 5</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Betreiber kann nicht mitlesen</td>
|
||||||
|
<td className="px-4 py-3">Client-seitige AES-256-GCM Verschluesselung</td>
|
||||||
|
<td className="px-4 py-3">DSGVO Art. 32</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Kein Zugriff durch andere Kunden</td>
|
||||||
|
<td className="px-4 py-3">Tenant-Isolation (Namespace) auf allen 3 Ebenen</td>
|
||||||
|
<td className="px-4 py-3">DSGVO Art. 25</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Kein KI-Training mit Kundendaten</td>
|
||||||
|
<td className="px-4 py-3"><code>training_allowed: false</code> auf allen Vektoren</td>
|
||||||
|
<td className="px-4 py-3">AI Act Art. 10</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Alles nachvollziehbar</td>
|
||||||
|
<td className="px-4 py-3">Vollstaendiger Audit-Trail aller Aktionen</td>
|
||||||
|
<td className="px-4 py-3">DSGVO Art. 5 Abs. 2</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td className="px-4 py-3 font-medium">Kunde behaelt volle Kontrolle</td>
|
||||||
|
<td className="px-4 py-3">Jederzeitiger Widerruf, Loeschung, Datenexport</td>
|
||||||
|
<td className="px-4 py-3">DSGVO Art. 17, 20</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<InfoBox type="success" title="Das Wichtigste in einem Satz">
|
||||||
|
Die Namespace-Technologie ermoeglicht KI-gestuetzte Datenverarbeitung in der Cloud, bei der
|
||||||
|
<strong> keine personenbezogenen Daten das Kundensystem verlassen</strong>, alle Daten
|
||||||
|
<strong> Ende-zu-Ende verschluesselt</strong> sind, jeder Kunde seinen
|
||||||
|
<strong> eigenen abgeschotteten Namespace</strong> hat, und ein
|
||||||
|
<strong> vollstaendiger Audit-Trail</strong> jede Aktion dokumentiert.
|
||||||
|
</InfoBox>
|
||||||
|
</DevPortalLayout>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -72,6 +72,7 @@ const navigation: NavItem[] = [
|
|||||||
icon: <BookOpen className="w-4 h-4" />,
|
icon: <BookOpen className="w-4 h-4" />,
|
||||||
items: [
|
items: [
|
||||||
{ title: 'Compliance Service', href: '/development/docs' },
|
{ title: 'Compliance Service', href: '/development/docs' },
|
||||||
|
{ title: 'Klausur-Namespace (BYOEH)', href: '/development/byoeh' },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
Reference in New Issue
Block a user