Files
breakpilot-compliance/ai-compliance-sdk/internal/maximizer/optimizer_test.go
Benjamin Admin 1ac716261c
Some checks failed
Build + Deploy / build-admin-compliance (push) Successful in 1m45s
Build + Deploy / build-backend-compliance (push) Successful in 4m42s
Build + Deploy / build-ai-sdk (push) Successful in 46s
Build + Deploy / build-developer-portal (push) Successful in 1m6s
Build + Deploy / build-tts (push) Successful in 1m14s
Build + Deploy / build-document-crawler (push) Successful in 31s
Build + Deploy / build-dsms-gateway (push) Successful in 24s
CI / branch-name (push) Has been skipped
CI / guardrail-integrity (push) Has been skipped
CI / loc-budget (push) Failing after 15s
CI / secret-scan (push) Has been skipped
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / nodejs-build (push) Successful in 2m27s
CI / dep-audit (push) Has been skipped
CI / sbom-scan (push) Has been skipped
CI / test-go (push) Failing after 37s
CI / test-python-backend (push) Successful in 42s
CI / test-python-document-crawler (push) Successful in 25s
CI / test-python-dsms-gateway (push) Successful in 23s
CI / validate-canonical-controls (push) Successful in 18s
Build + Deploy / trigger-orca (push) Successful in 4m35s
feat: Compliance Maximizer — Regulatory Optimization Engine
Neues Modul, das den regulatorischen Spielraum für KI-Use-Cases
deterministisch berechnet und optimale Konfigurationen vorschlägt.

Kernfeatures:
- 13-Dimensionen Constraint-Space (DSGVO + AI Act)
- 3-Zonen-Analyse: Verboten / Eingeschränkt / Erlaubt
- Deterministische Optimizer-Engine (kein LLM im Kern)
- 28 Constraint-Regeln aus DSGVO, AI Act, EDPB Guidelines
- 28 Tests (Golden Suite + Meta-Tests)
- REST API: /sdk/v1/maximizer/* (9 Endpoints)
- Frontend: 3-Zonen-Visualisierung, Dimension-Form, Score-Gauges

[migration-approved]

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-23 09:10:20 +02:00

301 lines
9.0 KiB
Go

package maximizer
import "testing"
// newTestOptimizer constructs an Optimizer wired to the test rule set.
// It is a t.Helper so failures in rule loading point at the caller.
func newTestOptimizer(t *testing.T) *Optimizer {
	t.Helper()
	return NewOptimizer(NewEvaluator(loadTestRules(t)))
}
// --- Golden Test Cases ---
// TestGC01_HRFullAutomationBlocked: a fully automated, fully binding,
// high-impact HR configuration must be rejected, and the optimizer must
// propose a compliant alternative that enforces human review.
func TestGC01_HRFullAutomationBlocked(t *testing.T) {
	o := newTestOptimizer(t)
	cfg := &DimensionConfig{
		AutomationLevel:    AutoFull,
		DecisionBinding:    BindingFullyBinding,
		DecisionImpact:     ImpactHigh,
		Domain:             DomainHR,
		DataType:           DataPersonal,
		HumanInLoop:        HILNone,
		Explainability:     ExplainNone,
		RiskClassification: RiskMinimal,
		LegalBasis:         LegalContract,
		ModelType:          ModelBlackboxLLM,
		DeploymentScope:    ScopeExternal,
	}
	res := o.Optimize(cfg)
	if res.OriginalCompliant {
		t.Fatal("expected original to be non-compliant")
	}
	if res.MaxSafeConfig == nil {
		t.Fatal("expected an optimized variant")
	}
	best := res.MaxSafeConfig
	if best.Config.AutomationLevel == AutoFull {
		t.Error("optimizer must change automation_level from full")
	}
	if got := best.Config.HumanInLoop; got != HILRequired {
		t.Errorf("expected human_in_loop=required, got %s", got)
	}
	if best.Config.DecisionBinding == BindingFullyBinding {
		t.Error("expected decision_binding to change from fully_binding")
	}
	// The proposed configuration must itself pass evaluation.
	if !best.Evaluation.IsCompliant {
		t.Errorf("MaxSafeConfig is not compliant: violations=%+v", best.Evaluation.Violations)
	}
}
// TestGC02_HRRankingWithHumanReviewAllowed: an assistive HR ranking setup
// with mandatory human review and basic explainability should yield at
// least one safe variant (allowed with conditions).
func TestGC02_HRRankingWithHumanReviewAllowed(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingHumanReview,
		DecisionImpact:       ImpactHigh,
		Domain:               DomainHR,
		DataType:             DataPersonal,
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalContract,
		TransparencyRequired: true,
		LoggingRequired:      true,
		ModelType:            ModelBlackboxLLM,
		DeploymentScope:      ScopeExternal,
	})
	// Should be allowed with conditions (requirements from high-risk classification)
	if res.MaxSafeConfig == nil {
		t.Fatal("expected a variant")
	}
}
// TestGC05_SensitiveDataWithoutLegalBasis: sensitive data processed under
// legitimate_interest is non-compliant; the optimizer must switch the
// legal basis to consent.
func TestGC05_SensitiveDataWithoutLegalBasis(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		DataType:        DataSensitive,
		LegalBasis:      LegalLegitimateInterest,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		AutomationLevel: AutoAssistive,
		HumanInLoop:     HILRequired,
		DecisionBinding: BindingHumanReview,
	})
	if res.OriginalCompliant {
		t.Error("expected non-compliant: sensitive data with legitimate_interest")
	}
	if res.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	if got := res.MaxSafeConfig.Config.LegalBasis; got != LegalConsent {
		t.Errorf("expected legal_basis=consent, got %s", got)
	}
}
// TestGC16_ProhibitedPracticeBlocked: a prohibited-risk classification is a
// hard stop — no compliance, and no optimization variants are offered.
func TestGC16_ProhibitedPracticeBlocked(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		RiskClassification: RiskProhibited,
		DeploymentScope:    ScopePublic,
	})
	if res.OriginalCompliant {
		t.Error("expected non-compliant for prohibited")
	}
	// Prohibited = no optimization possible
	if len(res.Variants) != 0 {
		t.Error("expected no variants for prohibited classification")
	}
}
// TestGC18_OptimizerMinimalChange: the optimizer must touch only what is
// needed for compliance — domain, an already-sufficient explainability
// level, and the model type have to survive optimization unchanged.
func TestGC18_OptimizerMinimalChange(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		AutomationLevel:    AutoFull,
		DecisionBinding:    BindingFullyBinding,
		DecisionImpact:     ImpactHigh,
		Domain:             DomainHR,
		DataType:           DataPersonal,
		HumanInLoop:        HILNone,
		Explainability:     ExplainBasic,
		RiskClassification: RiskMinimal,
		LegalBasis:         LegalContract,
		ModelType:          ModelStatistical,
		DeploymentScope:    ScopeInternal,
	})
	if res.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	best := res.MaxSafeConfig
	// Domain must NOT change
	if got := best.Config.Domain; got != DomainHR {
		t.Errorf("optimizer must not change domain: got %s", got)
	}
	// Explainability was already basic, should stay
	if got := best.Config.Explainability; got != ExplainBasic {
		t.Errorf("optimizer should keep explainability=basic, got %s", got)
	}
	// Model type should not change unnecessarily
	if got := best.Config.ModelType; got != ModelStatistical {
		t.Errorf("optimizer should not change model_type unnecessarily, got %s", got)
	}
}
// TestGC20_AlreadyCompliantNoChanges: a config that is already compliant
// comes back untouched — zero deltas and full utility score.
func TestGC20_AlreadyCompliantNoChanges(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingNonBinding,
		DecisionImpact:       ImpactLow,
		Domain:               DomainGeneral,
		DataType:             DataNonPersonal,
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalContract,
		TransparencyRequired: false,
		LoggingRequired:      false,
		ModelType:            ModelRuleBased,
		DeploymentScope:      ScopeInternal,
	})
	if !res.OriginalCompliant {
		t.Error("expected compliant")
	}
	if res.MaxSafeConfig == nil {
		t.Fatal("expected variant")
	}
	if n := res.MaxSafeConfig.DeltaCount; n != 0 {
		t.Errorf("expected 0 deltas for compliant config, got %d", n)
	}
	if s := res.MaxSafeConfig.UtilityScore; s != 100 {
		t.Errorf("expected utility 100, got %d", s)
	}
}
// --- Meta Tests ---
// TestMT01_Determinism runs the optimizer twice on the same input and
// verifies both runs agree on every externally visible result: compliance
// verdict, variant count, MaxSafeConfig presence, and composite score.
func TestMT01_Determinism(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		DataType:        DataPersonal,
		HumanInLoop:     HILNone,
	}
	r1 := opt.Optimize(config)
	r2 := opt.Optimize(config)
	if r1.OriginalCompliant != r2.OriginalCompliant {
		t.Error("determinism failed: different compliance result")
	}
	if len(r1.Variants) != len(r2.Variants) {
		t.Errorf("determinism failed: %d vs %d variants", len(r1.Variants), len(r2.Variants))
	}
	// Bug fix: the original guard (both non-nil) silently passed when one
	// run produced a MaxSafeConfig and the other did not — itself a
	// determinism failure. Detect that asymmetry explicitly.
	if (r1.MaxSafeConfig == nil) != (r2.MaxSafeConfig == nil) {
		t.Fatal("determinism failed: MaxSafeConfig present in only one run")
	}
	if r1.MaxSafeConfig != nil && r2.MaxSafeConfig != nil {
		if r1.MaxSafeConfig.CompositeScore != r2.MaxSafeConfig.CompositeScore {
			t.Error("determinism failed: different composite scores")
		}
	}
}
// TestMT03_ViolationsReferenceObligations: every violation and every
// triggered rule reported for the original evaluation must carry a
// non-empty obligation reference.
func TestMT03_ViolationsReferenceObligations(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		DataType:        DataSensitive,
	})
	eval := res.OriginalEval
	for _, v := range eval.Violations {
		if len(v.ObligationID) == 0 {
			t.Errorf("violation %s missing obligation reference", v.RuleID)
		}
	}
	for _, r := range eval.TriggeredRules {
		if len(r.ObligationID) == 0 {
			t.Errorf("triggered rule %s missing obligation reference", r.RuleID)
		}
	}
}
// TestMT05_OptimizerMinimality: for a config whose only problem is the
// legal basis for sensitive data, every delta the optimizer emits must be
// in the small set of compliance-related dimensions.
func TestMT05_OptimizerMinimality(t *testing.T) {
	o := newTestOptimizer(t)
	// Config that only violates one dimension
	res := o.Optimize(&DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingHumanReview,
		DecisionImpact:       ImpactLow,
		Domain:               DomainGeneral,
		DataType:             DataSensitive, // only violation: needs consent
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalLegitimateInterest, // must change to consent
		TransparencyRequired: false,
		LoggingRequired:      false,
		ModelType:            ModelRuleBased,
		DeploymentScope:      ScopeInternal,
	})
	if res.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	// Expected: legal_basis→consent, transparency, logging for sensitive data;
	// data_type→personal is from optimizer meta-rule (reduce unnecessary sensitivity).
	allowed := map[string]bool{
		"legal_basis":           true,
		"transparency_required": true,
		"logging_required":      true,
		"data_type":             true,
	}
	for _, d := range res.MaxSafeConfig.Deltas {
		if !allowed[d.Dimension] {
			t.Errorf("unexpected dimension change: %s (%s → %s)", d.Dimension, d.From, d.To)
		}
	}
}
// TestOptimizeProducesRankedVariants: when the optimizer returns multiple
// variants they must be ordered by descending composite score. With fewer
// than two variants the ordering property is vacuous, so the test skips.
func TestOptimizeProducesRankedVariants(t *testing.T) {
	o := newTestOptimizer(t)
	res := o.Optimize(&DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		DataType:        DataPersonal,
		HumanInLoop:     HILNone,
		Explainability:  ExplainNone,
		ModelType:       ModelBlackboxLLM,
		DeploymentScope: ScopeExternal,
	})
	vs := res.Variants
	if len(vs) < 2 {
		t.Skipf("only %d variants generated", len(vs))
	}
	// Verify descending composite score order via pairwise comparison.
	for j, cur := range vs[1:] {
		prev := vs[j]
		if cur.CompositeScore > prev.CompositeScore {
			t.Errorf("variants not sorted: [%d]=%.1f > [%d]=%.1f",
				j+1, cur.CompositeScore,
				j, prev.CompositeScore)
		}
	}
}