// Package maximizer tests: golden test cases (GC*) and meta tests (MT*) for
// the rule-driven Optimizer. They rely on project types (Optimizer,
// DimensionConfig, the dimension constants) and the loadTestRules helper
// defined elsewhere in the package.
package maximizer

import "testing"

// newTestOptimizer builds an Optimizer backed by the shared test rule set.
func newTestOptimizer(t *testing.T) *Optimizer {
	t.Helper()
	rules := loadTestRules(t)
	eval := NewEvaluator(rules)
	return NewOptimizer(eval)
}

// --- Golden Test Cases ---

// TestGC01_HRFullAutomationBlocked: fully automated, fully binding HR
// decisions must be flagged non-compliant, and the optimizer must relax
// automation, require a human in the loop, and soften the binding.
func TestGC01_HRFullAutomationBlocked(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel:    AutoFull,
		DecisionBinding:    BindingFullyBinding,
		DecisionImpact:     ImpactHigh,
		Domain:             DomainHR,
		DataType:           DataPersonal,
		HumanInLoop:        HILNone,
		Explainability:     ExplainNone,
		RiskClassification: RiskMinimal,
		LegalBasis:         LegalContract,
		ModelType:          ModelBlackboxLLM,
		DeploymentScope:    ScopeExternal,
	}
	result := opt.Optimize(config)
	if result.OriginalCompliant {
		t.Fatal("expected original to be non-compliant")
	}
	if result.MaxSafeConfig == nil {
		t.Fatal("expected an optimized variant")
	}
	max := result.MaxSafeConfig
	if max.Config.AutomationLevel == AutoFull {
		t.Error("optimizer must change automation_level from full")
	}
	if max.Config.HumanInLoop != HILRequired {
		t.Errorf("expected human_in_loop=required, got %s", max.Config.HumanInLoop)
	}
	if max.Config.DecisionBinding == BindingFullyBinding {
		t.Error("expected decision_binding to change from fully_binding")
	}
	// Verify the optimized config is actually compliant
	if !max.Evaluation.IsCompliant {
		t.Errorf("MaxSafeConfig is not compliant: violations=%+v", max.Evaluation.Violations)
	}
}

// TestGC02_HRRankingWithHumanReviewAllowed: an assistive HR ranking setup
// with human review already in place should yield at least one safe variant.
func TestGC02_HRRankingWithHumanReviewAllowed(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingHumanReview,
		DecisionImpact:       ImpactHigh,
		Domain:               DomainHR,
		DataType:             DataPersonal,
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalContract,
		TransparencyRequired: true,
		LoggingRequired:      true,
		ModelType:            ModelBlackboxLLM,
		DeploymentScope:      ScopeExternal,
	}
	result := opt.Optimize(config)
	// Should be allowed with conditions (requirements from high-risk classification)
	if result.MaxSafeConfig == nil {
		t.Fatal("expected a variant")
	}
}

// TestGC05_SensitiveDataWithoutLegalBasis: sensitive data processed under
// legitimate_interest is non-compliant; the optimizer must switch the legal
// basis to consent.
func TestGC05_SensitiveDataWithoutLegalBasis(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		DataType:        DataSensitive,
		LegalBasis:      LegalLegitimateInterest,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		AutomationLevel: AutoAssistive,
		HumanInLoop:     HILRequired,
		DecisionBinding: BindingHumanReview,
	}
	result := opt.Optimize(config)
	if result.OriginalCompliant {
		t.Error("expected non-compliant: sensitive data with legitimate_interest")
	}
	if result.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	if result.MaxSafeConfig.Config.LegalBasis != LegalConsent {
		t.Errorf("expected legal_basis=consent, got %s", result.MaxSafeConfig.Config.LegalBasis)
	}
}

// TestGC16_ProhibitedPracticeBlocked: a prohibited risk classification is
// non-compliant and must produce no variants — optimization is not an option.
func TestGC16_ProhibitedPracticeBlocked(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		RiskClassification: RiskProhibited,
		DeploymentScope:    ScopePublic,
	}
	result := opt.Optimize(config)
	if result.OriginalCompliant {
		t.Error("expected non-compliant for prohibited")
	}
	// Prohibited = no optimization possible
	if len(result.Variants) > 0 {
		t.Error("expected no variants for prohibited classification")
	}
}

// TestGC18_OptimizerMinimalChange: the optimizer must leave dimensions alone
// that are either fixed by design (domain) or already compliant
// (explainability, model type).
func TestGC18_OptimizerMinimalChange(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel:    AutoFull,
		DecisionBinding:    BindingFullyBinding,
		DecisionImpact:     ImpactHigh,
		Domain:             DomainHR,
		DataType:           DataPersonal,
		HumanInLoop:        HILNone,
		Explainability:     ExplainBasic,
		RiskClassification: RiskMinimal,
		LegalBasis:         LegalContract,
		ModelType:          ModelStatistical,
		DeploymentScope:    ScopeInternal,
	}
	result := opt.Optimize(config)
	if result.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	max := result.MaxSafeConfig
	// Domain must NOT change
	if max.Config.Domain != DomainHR {
		t.Errorf("optimizer must not change domain: got %s", max.Config.Domain)
	}
	// Explainability was already basic, should stay
	if max.Config.Explainability != ExplainBasic {
		t.Errorf("optimizer should keep explainability=basic, got %s",
			max.Config.Explainability)
	}
	// Model type should not change unnecessarily
	if max.Config.ModelType != ModelStatistical {
		t.Errorf("optimizer should not change model_type unnecessarily, got %s", max.Config.ModelType)
	}
}

// TestGC20_AlreadyCompliantNoChanges: a compliant config passes through the
// optimizer with zero deltas and full utility.
func TestGC20_AlreadyCompliantNoChanges(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingNonBinding,
		DecisionImpact:       ImpactLow,
		Domain:               DomainGeneral,
		DataType:             DataNonPersonal,
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalContract,
		TransparencyRequired: false,
		LoggingRequired:      false,
		ModelType:            ModelRuleBased,
		DeploymentScope:      ScopeInternal,
	}
	result := opt.Optimize(config)
	if !result.OriginalCompliant {
		t.Error("expected compliant")
	}
	if result.MaxSafeConfig == nil {
		t.Fatal("expected variant")
	}
	if result.MaxSafeConfig.DeltaCount != 0 {
		t.Errorf("expected 0 deltas for compliant config, got %d", result.MaxSafeConfig.DeltaCount)
	}
	if result.MaxSafeConfig.UtilityScore != 100 {
		t.Errorf("expected utility 100, got %d", result.MaxSafeConfig.UtilityScore)
	}
}

// --- Meta Tests ---

// TestMT01_Determinism: two runs over the same config must agree on
// compliance, variant count, and the top composite score.
func TestMT01_Determinism(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		DataType:        DataPersonal,
		HumanInLoop:     HILNone,
	}
	r1 := opt.Optimize(config)
	r2 := opt.Optimize(config)
	if r1.OriginalCompliant != r2.OriginalCompliant {
		t.Error("determinism failed: different compliance result")
	}
	if len(r1.Variants) != len(r2.Variants) {
		t.Errorf("determinism failed: %d vs %d variants", len(r1.Variants), len(r2.Variants))
	}
	if r1.MaxSafeConfig != nil && r2.MaxSafeConfig != nil {
		if r1.MaxSafeConfig.CompositeScore != r2.MaxSafeConfig.CompositeScore {
			t.Error("determinism failed: different composite scores")
		}
	}
}

// TestMT03_ViolationsReferenceObligations: every violation and triggered rule
// in the original evaluation must carry a non-empty obligation reference.
func TestMT03_ViolationsReferenceObligations(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		DataType:        DataSensitive,
	}
	result := opt.Optimize(config)
	for _, v := range result.OriginalEval.Violations {
		if v.ObligationID == "" {
			t.Errorf("violation %s missing obligation reference", v.RuleID)
		}
	}
	for _, tr := range result.OriginalEval.TriggeredRules {
		if tr.ObligationID == "" {
			t.Errorf("triggered rule %s missing obligation reference", tr.RuleID)
		}
	}
}

// TestMT05_OptimizerMinimality: with a single compliance violation, the
// optimizer's deltas must stay within the whitelisted, compliance-related
// dimensions.
func TestMT05_OptimizerMinimality(t *testing.T) {
	opt := newTestOptimizer(t)
	// Config that only violates one dimension
	config := &DimensionConfig{
		AutomationLevel:      AutoAssistive,
		DecisionBinding:      BindingHumanReview,
		DecisionImpact:       ImpactLow,
		Domain:               DomainGeneral,
		DataType:             DataSensitive, // only violation: needs consent
		HumanInLoop:          HILRequired,
		Explainability:       ExplainBasic,
		RiskClassification:   RiskMinimal,
		LegalBasis:           LegalLegitimateInterest, // must change to consent
		TransparencyRequired: false,
		LoggingRequired:      false,
		ModelType:            ModelRuleBased,
		DeploymentScope:      ScopeInternal,
	}
	result := opt.Optimize(config)
	if result.MaxSafeConfig == nil {
		t.Fatal("expected optimized variant")
	}
	// Check that only compliance-related dimensions changed
	for _, d := range result.MaxSafeConfig.Deltas {
		switch d.Dimension {
		case "legal_basis", "transparency_required", "logging_required", "data_type":
			// Expected: legal_basis→consent, transparency, logging for sensitive data
			// data_type→personal is from optimizer meta-rule (reduce unnecessary sensitivity)
		default:
			t.Errorf("unexpected dimension change: %s (%s → %s)", d.Dimension, d.From, d.To)
		}
	}
}

// TestOptimizeProducesRankedVariants: variants must come back sorted by
// composite score, highest first. Skips when fewer than two variants exist,
// since ordering is then vacuous.
func TestOptimizeProducesRankedVariants(t *testing.T) {
	opt := newTestOptimizer(t)
	config := &DimensionConfig{
		AutomationLevel: AutoFull,
		DecisionImpact:  ImpactHigh,
		Domain:          DomainHR,
		DataType:        DataPersonal,
		HumanInLoop:     HILNone,
		Explainability:  ExplainNone,
		ModelType:       ModelBlackboxLLM,
		DeploymentScope: ScopeExternal,
	}
	result := opt.Optimize(config)
	if len(result.Variants) < 2 {
		t.Skipf("only %d variants generated", len(result.Variants))
	}
	// Verify descending composite score order
	for i := 1; i < len(result.Variants); i++ {
		if result.Variants[i].CompositeScore > result.Variants[i-1].CompositeScore {
			t.Errorf("variants not sorted: [%d]=%.1f > [%d]=%.1f",
				i, result.Variants[i].CompositeScore,
				i-1, result.Variants[i-1].CompositeScore)
		}
	}
}