feat: Compliance Maximizer — Regulatory Optimization Engine
Some checks failed
Build + Deploy / build-admin-compliance (push) Successful in 1m45s
Build + Deploy / build-backend-compliance (push) Successful in 4m42s
Build + Deploy / build-ai-sdk (push) Successful in 46s
Build + Deploy / build-developer-portal (push) Successful in 1m6s
Build + Deploy / build-tts (push) Successful in 1m14s
Build + Deploy / build-document-crawler (push) Successful in 31s
Build + Deploy / build-dsms-gateway (push) Successful in 24s
CI / branch-name (push) Has been skipped
CI / guardrail-integrity (push) Has been skipped
CI / loc-budget (push) Failing after 15s
CI / secret-scan (push) Has been skipped
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / nodejs-build (push) Successful in 2m27s
CI / dep-audit (push) Has been skipped
CI / sbom-scan (push) Has been skipped
CI / test-go (push) Failing after 37s
CI / test-python-backend (push) Successful in 42s
CI / test-python-document-crawler (push) Successful in 25s
CI / test-python-dsms-gateway (push) Successful in 23s
CI / validate-canonical-controls (push) Successful in 18s
Build + Deploy / trigger-orca (push) Successful in 4m35s

Neues Modul, das den regulatorischen Spielraum für KI-Use-Cases
deterministisch berechnet und optimale Konfigurationen vorschlägt.

Kernfeatures:
- 13-Dimensionen Constraint-Space (DSGVO + AI Act)
- 3-Zonen-Analyse: Verboten / Eingeschränkt / Erlaubt
- Deterministische Optimizer-Engine (kein LLM im Kern)
- 28 Constraint-Regeln aus DSGVO, AI Act, EDPB Guidelines
- 28 Tests (Golden Suite + Meta-Tests)
- REST API: /sdk/v1/maximizer/* (9 Endpoints)
- Frontend: 3-Zonen-Visualisierung, Dimension-Form, Score-Gauges

[migration-approved]

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-04-23 09:10:20 +02:00
parent 01bf1463b8
commit 1ac716261c
30 changed files with 3779 additions and 1 deletions

View File

@@ -0,0 +1,209 @@
package maximizer
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/google/uuid"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgxpool"
)
// Optimization is the DB entity for a maximizer optimization result.
// It mirrors one row of the maximizer_optimizations table; the JSON-typed
// fields (InputConfig, OriginalEvaluation, MaxSafeConfig, Variants, ZoneMap)
// are stored as serialized JSON columns.
type Optimization struct {
	ID       uuid.UUID  `json:"id"`
	TenantID uuid.UUID  `json:"tenant_id"`
	// AssessmentID optionally links this result to an assessment; nullable in the DB.
	AssessmentID *uuid.UUID `json:"assessment_id,omitempty"`
	Title        string     `json:"title"`
	// Status defaults to "completed" when left empty at creation time.
	Status      string          `json:"status"`
	InputConfig DimensionConfig `json:"input_config"`
	IsCompliant bool            `json:"is_compliant"`
	// OriginalEvaluation is the evaluation of the unmodified input config.
	OriginalEvaluation EvaluationResult `json:"original_evaluation"`
	// MaxSafeConfig is the computed maximal compliant variant, if one exists;
	// stored as a nullable JSON column.
	MaxSafeConfig *OptimizedVariant   `json:"max_safe_config,omitempty"`
	Variants      []OptimizedVariant  `json:"variants"`
	ZoneMap       map[string]ZoneInfo `json:"zone_map"`
	// ConstraintVersion records which constraint-rule set produced this result;
	// defaults to "1.0.0" when left empty at creation time.
	ConstraintVersion string    `json:"constraint_version"`
	CreatedAt         time.Time `json:"created_at"`
	UpdatedAt         time.Time `json:"updated_at"`
	CreatedBy         uuid.UUID `json:"created_by"`
}
// Store handles maximizer data persistence.
// All queries go through the embedded pgx connection pool; the zero value is
// not usable — construct with NewStore.
type Store struct {
	pool *pgxpool.Pool
}
// NewStore creates a new maximizer store backed by the given connection pool.
func NewStore(pool *pgxpool.Pool) *Store {
	s := &Store{pool: pool}
	return s
}
// CreateOptimization persists a new optimization result.
//
// It assigns a fresh ID, stamps CreatedAt/UpdatedAt with the same UTC time,
// and applies defaults for Status ("completed") and ConstraintVersion
// ("1.0.0") when they are empty. The JSON-valued fields are serialized
// before the INSERT; a serialization failure aborts the operation instead of
// silently writing empty/corrupt JSON (the original code discarded these
// errors with `_`).
func (s *Store) CreateOptimization(ctx context.Context, o *Optimization) error {
	o.ID = uuid.New()
	o.CreatedAt = time.Now().UTC()
	o.UpdatedAt = o.CreatedAt
	if o.Status == "" {
		o.Status = "completed"
	}
	if o.ConstraintVersion == "" {
		o.ConstraintVersion = "1.0.0"
	}
	inputConfig, err := json.Marshal(o.InputConfig)
	if err != nil {
		return fmt.Errorf("marshal input_config: %w", err)
	}
	originalEval, err := json.Marshal(o.OriginalEvaluation)
	if err != nil {
		return fmt.Errorf("marshal original_evaluation: %w", err)
	}
	// MaxSafeConfig may be nil; json.Marshal encodes that as "null", which
	// round-trips cleanly through the nullable JSON column.
	maxSafe, err := json.Marshal(o.MaxSafeConfig)
	if err != nil {
		return fmt.Errorf("marshal max_safe_config: %w", err)
	}
	variants, err := json.Marshal(o.Variants)
	if err != nil {
		return fmt.Errorf("marshal variants: %w", err)
	}
	zoneMap, err := json.Marshal(o.ZoneMap)
	if err != nil {
		return fmt.Errorf("marshal zone_map: %w", err)
	}
	_, err = s.pool.Exec(ctx, `
		INSERT INTO maximizer_optimizations (
			id, tenant_id, assessment_id, title, status,
			input_config, is_compliant, original_evaluation,
			max_safe_config, variants, zone_map,
			constraint_version, created_at, updated_at, created_by
		) VALUES (
			$1, $2, $3, $4, $5,
			$6, $7, $8,
			$9, $10, $11,
			$12, $13, $14, $15
		)`,
		o.ID, o.TenantID, o.AssessmentID, o.Title, o.Status,
		inputConfig, o.IsCompliant, originalEval,
		maxSafe, variants, zoneMap,
		o.ConstraintVersion, o.CreatedAt, o.UpdatedAt, o.CreatedBy,
	)
	if err != nil {
		return fmt.Errorf("create optimization: %w", err)
	}
	return nil
}
// GetOptimization retrieves a single optimization by ID.
func (s *Store) GetOptimization(ctx context.Context, id uuid.UUID) (*Optimization, error) {
	const query = `
		SELECT id, tenant_id, assessment_id, title, status,
			input_config, is_compliant, original_evaluation,
			max_safe_config, variants, zone_map,
			constraint_version, created_at, updated_at, created_by
		FROM maximizer_optimizations WHERE id = $1`
	return s.scanOptimization(s.pool.QueryRow(ctx, query, id))
}
// OptimizationFilters for list queries.
// All fields are optional; zero values mean "no filter".
type OptimizationFilters struct {
	// IsCompliant, when non-nil, restricts results to that compliance flag.
	IsCompliant *bool
	// Search performs a case-insensitive substring match on the title (ILIKE).
	Search string
	// Limit caps the page size; values <= 0 fall back to a default of 20.
	Limit int
	// Offset is the number of rows to skip for pagination.
	Offset int
}
// ListOptimizations returns optimizations for a tenant, newest first,
// along with the total (unpaginated) count matching the filters.
//
// A nil filter struct is treated as no filters; Limit <= 0 defaults to 20.
// Unlike the original implementation, rows.Err() is checked after the scan
// loop so that a mid-iteration query failure is reported instead of being
// silently returned as a (possibly truncated) successful result.
func (s *Store) ListOptimizations(ctx context.Context, tenantID uuid.UUID, f *OptimizationFilters) ([]Optimization, int, error) {
	if f == nil {
		f = &OptimizationFilters{}
	}
	if f.Limit <= 0 {
		f.Limit = 20
	}
	// Build the WHERE clause dynamically; idx tracks the next positional
	// parameter so the count and fetch queries share the same args prefix.
	where := "WHERE tenant_id = $1"
	args := []any{tenantID}
	idx := 2
	if f.IsCompliant != nil {
		where += fmt.Sprintf(" AND is_compliant = $%d", idx)
		args = append(args, *f.IsCompliant)
		idx++
	}
	if f.Search != "" {
		where += fmt.Sprintf(" AND title ILIKE $%d", idx)
		args = append(args, "%"+f.Search+"%")
		idx++
	}
	// Count the full result set before applying LIMIT/OFFSET.
	var total int
	countQuery := "SELECT COUNT(*) FROM maximizer_optimizations " + where
	if err := s.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
		return nil, 0, fmt.Errorf("count optimizations: %w", err)
	}
	// Fetch the requested page.
	query := fmt.Sprintf(`
		SELECT id, tenant_id, assessment_id, title, status,
			input_config, is_compliant, original_evaluation,
			max_safe_config, variants, zone_map,
			constraint_version, created_at, updated_at, created_by
		FROM maximizer_optimizations %s
		ORDER BY created_at DESC
		LIMIT $%d OFFSET $%d`, where, idx, idx+1)
	args = append(args, f.Limit, f.Offset)
	rows, err := s.pool.Query(ctx, query, args...)
	if err != nil {
		return nil, 0, fmt.Errorf("list optimizations: %w", err)
	}
	defer rows.Close()
	var results []Optimization
	for rows.Next() {
		o, err := s.scanOptimizationRows(rows)
		if err != nil {
			return nil, 0, err
		}
		results = append(results, *o)
	}
	// rows.Err() surfaces errors that interrupted iteration (e.g. a dropped
	// connection); without this check a partial list would look like success.
	if err := rows.Err(); err != nil {
		return nil, 0, fmt.Errorf("list optimizations: %w", err)
	}
	return results, total, nil
}
// DeleteOptimization removes an optimization.
// Deleting a non-existent ID is not an error (the DELETE simply affects no rows).
func (s *Store) DeleteOptimization(ctx context.Context, id uuid.UUID) error {
	if _, err := s.pool.Exec(ctx, `DELETE FROM maximizer_optimizations WHERE id = $1`, id); err != nil {
		return fmt.Errorf("delete optimization: %w", err)
	}
	return nil
}
// scanOptimization scans a single-row result into an Optimization.
//
// The original implementation discarded all json.Unmarshal errors, so a
// corrupt JSON column produced a silently zero-valued field. Errors are now
// propagated; NULL columns (nil byte slices, e.g. max_safe_config when no
// safe variant exists) are skipped rather than fed to Unmarshal, which would
// otherwise fail on empty input.
func (s *Store) scanOptimization(row pgx.Row) (*Optimization, error) {
	var o Optimization
	var inputConfig, originalEval, maxSafe, variants, zoneMap []byte
	err := row.Scan(
		&o.ID, &o.TenantID, &o.AssessmentID, &o.Title, &o.Status,
		&inputConfig, &o.IsCompliant, &originalEval,
		&maxSafe, &variants, &zoneMap,
		&o.ConstraintVersion, &o.CreatedAt, &o.UpdatedAt, &o.CreatedBy,
	)
	if err != nil {
		return nil, fmt.Errorf("scan optimization: %w", err)
	}
	for _, col := range []struct {
		name string
		data []byte
		dst  any
	}{
		{"input_config", inputConfig, &o.InputConfig},
		{"original_evaluation", originalEval, &o.OriginalEvaluation},
		{"max_safe_config", maxSafe, &o.MaxSafeConfig},
		{"variants", variants, &o.Variants},
		{"zone_map", zoneMap, &o.ZoneMap},
	} {
		if len(col.data) == 0 {
			continue // NULL column; leave the field at its zero value
		}
		if err := json.Unmarshal(col.data, col.dst); err != nil {
			return nil, fmt.Errorf("unmarshal %s: %w", col.name, err)
		}
	}
	return &o, nil
}
// scanOptimizationRows scans the current row of a multi-row result into an
// Optimization.
//
// As with scanOptimization, the original code ignored every json.Unmarshal
// error; they are now propagated, and NULL columns (nil byte slices) are
// skipped instead of producing an "unexpected end of JSON input" failure.
func (s *Store) scanOptimizationRows(rows pgx.Rows) (*Optimization, error) {
	var o Optimization
	var inputConfig, originalEval, maxSafe, variants, zoneMap []byte
	err := rows.Scan(
		&o.ID, &o.TenantID, &o.AssessmentID, &o.Title, &o.Status,
		&inputConfig, &o.IsCompliant, &originalEval,
		&maxSafe, &variants, &zoneMap,
		&o.ConstraintVersion, &o.CreatedAt, &o.UpdatedAt, &o.CreatedBy,
	)
	if err != nil {
		return nil, fmt.Errorf("scan optimization row: %w", err)
	}
	for _, col := range []struct {
		name string
		data []byte
		dst  any
	}{
		{"input_config", inputConfig, &o.InputConfig},
		{"original_evaluation", originalEval, &o.OriginalEvaluation},
		{"max_safe_config", maxSafe, &o.MaxSafeConfig},
		{"variants", variants, &o.Variants},
		{"zone_map", zoneMap, &o.ZoneMap},
	} {
		if len(col.data) == 0 {
			continue // NULL column; leave the field at its zero value
		}
		if err := json.Unmarshal(col.data, col.dst); err != nil {
			return nil, fmt.Errorf("unmarshal %s: %w", col.name, err)
		}
	}
	return &o, nil
}