diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index b587ae7..53e8408 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -109,7 +109,7 @@ Pruefen: `curl -sf http://macmini:8099/health` ### compliance-tts-service - Piper TTS + FFmpeg fuer Schulungsvideos -- Speichert Audio/Video in MinIO (bp-core-minio:9000) +- Speichert Audio/Video in Hetzner Object Storage (nbg1.your-objectstorage.com) - TTS-Modell: `de_DE-thorsten-high.onnx` - Dateien: `main.py`, `tts_engine.py`, `video_generator.py`, `storage.py` diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/[[...path]]/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/[[...path]]/route.ts new file mode 100644 index 0000000..d1aac27 --- /dev/null +++ b/admin-compliance/app/api/sdk/v1/vendor-compliance/[[...path]]/route.ts @@ -0,0 +1,122 @@ +/** + * Vendor Compliance API Proxy - Catch-all route + * Proxies all /api/sdk/v1/vendor-compliance/* requests to backend-compliance + * + * Backend routes: vendors, contracts, findings, control-instances, controls, export + * All under /api/compliance/vendor-compliance/ prefix on backend-compliance:8002 + */ + +import { NextRequest, NextResponse } from 'next/server' + +const BACKEND_URL = process.env.BACKEND_URL || 'http://backend-compliance:8002' + +async function proxyRequest( + request: NextRequest, + pathSegments: string[] | undefined, + method: string +) { + const pathStr = pathSegments?.join('/') || '' + const searchParams = request.nextUrl.searchParams.toString() + const basePath = `${BACKEND_URL}/api/compliance/vendor-compliance` + const url = pathStr + ? `${basePath}/${pathStr}${searchParams ? `?${searchParams}` : ''}` + : `${basePath}${searchParams ? 
`?${searchParams}` : ''}` + + try { + const headers: HeadersInit = { + 'Content-Type': 'application/json', + } + + const headerNames = ['authorization', 'x-namespace-id', 'x-tenant-slug'] + for (const name of headerNames) { + const value = request.headers.get(name) + if (value) { + headers[name] = value + } + } + + const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i + const clientUserId = request.headers.get('x-user-id') + const clientTenantId = request.headers.get('x-tenant-id') + headers['X-User-ID'] = (clientUserId && uuidRegex.test(clientUserId)) ? clientUserId : '00000000-0000-0000-0000-000000000001' + headers['X-Tenant-ID'] = (clientTenantId && uuidRegex.test(clientTenantId)) ? clientTenantId : (process.env.DEFAULT_TENANT_ID || '9282a473-5c95-4b3a-bf78-0ecc0ec71d3e') + + const fetchOptions: RequestInit = { + method, + headers, + signal: AbortSignal.timeout(60000), + } + + if (method === 'POST' || method === 'PUT' || method === 'PATCH') { + const body = await request.text() + if (body) { + fetchOptions.body = body + } + } + + const response = await fetch(url, fetchOptions) + + if (!response.ok) { + const errorText = await response.text() + let errorJson + try { + errorJson = JSON.parse(errorText) + } catch { + errorJson = { error: errorText } + } + return NextResponse.json( + { error: `Backend Error: ${response.status}`, ...errorJson }, + { status: response.status } + ) + } + + const data = await response.json() + return NextResponse.json(data) + } catch (error) { + console.error('Vendor Compliance API proxy error:', error) + return NextResponse.json( + { error: 'Verbindung zum Compliance Backend fehlgeschlagen' }, + { status: 503 } + ) + } +} + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ path?: string[] }> } +) { + const { path } = await params + return proxyRequest(request, path, 'GET') +} + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ path?: 
string[] }> } +) { + const { path } = await params + return proxyRequest(request, path, 'POST') +} + +export async function PUT( + request: NextRequest, + { params }: { params: Promise<{ path?: string[] }> } +) { + const { path } = await params + return proxyRequest(request, path, 'PUT') +} + +export async function PATCH( + request: NextRequest, + { params }: { params: Promise<{ path?: string[] }> } +) { + const { path } = await params + return proxyRequest(request, path, 'PATCH') +} + +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ path?: string[] }> } +) { + const { path } = await params + return proxyRequest(request, path, 'DELETE') +} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/contracts/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/contracts/route.ts deleted file mode 100644 index 5fc231a..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/contracts/route.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { v4 as uuidv4 } from 'uuid' -import { ContractDocument } from '@/lib/sdk/vendor-compliance' - -// In-memory storage for demo purposes -const contracts: Map = new Map() - -export async function GET(request: NextRequest) { - try { - const contractList = Array.from(contracts.values()) - - return NextResponse.json({ - success: true, - data: contractList, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching contracts:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch contracts' }, - { status: 500 } - ) - } -} - -export async function POST(request: NextRequest) { - try { - // Handle multipart form data for file upload - const formData = await request.formData() - const file = formData.get('file') as File | null - const vendorId = formData.get('vendorId') as string - const metadataStr = formData.get('metadata') as string - - if (!file || !vendorId) { - return 
NextResponse.json( - { success: false, error: 'File and vendorId are required' }, - { status: 400 } - ) - } - - const metadata = metadataStr ? JSON.parse(metadataStr) : {} - const id = uuidv4() - - // In production, upload file to storage (MinIO, S3, etc.) - const storagePath = `contracts/${id}/${file.name}` - - const contract: ContractDocument = { - id, - tenantId: 'default', - vendorId, - fileName: `${id}-${file.name}`, - originalName: file.name, - mimeType: file.type, - fileSize: file.size, - storagePath, - documentType: metadata.documentType || 'OTHER', - version: metadata.version || '1.0', - previousVersionId: metadata.previousVersionId, - parties: metadata.parties, - effectiveDate: metadata.effectiveDate ? new Date(metadata.effectiveDate) : undefined, - expirationDate: metadata.expirationDate ? new Date(metadata.expirationDate) : undefined, - autoRenewal: metadata.autoRenewal, - renewalNoticePeriod: metadata.renewalNoticePeriod, - terminationNoticePeriod: metadata.terminationNoticePeriod, - reviewStatus: 'PENDING', - status: 'DRAFT', - createdAt: new Date(), - updatedAt: new Date(), - } - - contracts.set(id, contract) - - return NextResponse.json( - { - success: true, - data: contract, - timestamp: new Date().toISOString(), - }, - { status: 201 } - ) - } catch (error) { - console.error('Error uploading contract:', error) - return NextResponse.json( - { success: false, error: 'Failed to upload contract' }, - { status: 500 } - ) - } -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/controls/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/controls/route.ts deleted file mode 100644 index ec22783..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/controls/route.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { CONTROLS_LIBRARY } from '@/lib/sdk/vendor-compliance' - -export async function GET(request: NextRequest) { - try { - const searchParams = 
request.nextUrl.searchParams - const domain = searchParams.get('domain') - - let controls = [...CONTROLS_LIBRARY] - - // Filter by domain if provided - if (domain) { - controls = controls.filter((c) => c.domain === domain) - } - - return NextResponse.json({ - success: true, - data: controls, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching controls:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch controls' }, - { status: 500 } - ) - } -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/download/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/download/route.ts deleted file mode 100644 index 6104807..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/download/route.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' - -/** - * GET /api/sdk/v1/vendor-compliance/export/[reportId]/download - * - * Download a generated report file. - * In production, this would redirect to a signed MinIO/S3 URL or stream the file. - */ -export async function GET( - request: NextRequest, - { params }: { params: Promise<{ reportId: string }> } -) { - const { reportId } = await params - - // TODO: Implement actual file download - // This would typically: - // 1. Verify report exists and user has access - // 2. Generate signed URL for MinIO/S3 - // 3. 
Redirect to signed URL or stream file - - // For now, return a placeholder PDF - const placeholderContent = ` -%PDF-1.4 -1 0 obj -<< /Type /Catalog /Pages 2 0 R >> -endobj -2 0 obj -<< /Type /Pages /Kids [3 0 R] /Count 1 >> -endobj -3 0 obj -<< /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R /Resources << /Font << /F1 5 0 R >> >> >> -endobj -4 0 obj -<< /Length 200 >> -stream -BT -/F1 24 Tf -100 700 Td -(Vendor Compliance Report) Tj -/F1 12 Tf -100 650 Td -(Report ID: ${reportId}) Tj -100 620 Td -(Generated: ${new Date().toISOString()}) Tj -100 580 Td -(This is a placeholder. Implement actual report generation.) Tj -ET -endstream -endobj -5 0 obj -<< /Type /Font /Subtype /Type1 /BaseFont /Helvetica >> -endobj -xref -0 6 -0000000000 65535 f -0000000009 00000 n -0000000058 00000 n -0000000115 00000 n -0000000266 00000 n -0000000519 00000 n -trailer -<< /Size 6 /Root 1 0 R >> -startxref -598 -%%EOF -`.trim() - - // Return as PDF - return new NextResponse(placeholderContent, { - headers: { - 'Content-Type': 'application/pdf', - 'Content-Disposition': `attachment; filename="Report_${reportId.slice(0, 8)}.pdf"`, - }, - }) -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/route.ts deleted file mode 100644 index f993485..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/[reportId]/route.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' - -/** - * GET /api/sdk/v1/vendor-compliance/export/[reportId] - * - * Get report metadata by ID. 
- */ -export async function GET( - request: NextRequest, - { params }: { params: Promise<{ reportId: string }> } -) { - const { reportId } = await params - - // TODO: Fetch report metadata from database - // For now, return mock data - - return NextResponse.json({ - id: reportId, - status: 'completed', - filename: `Report_${reportId.slice(0, 8)}.pdf`, - generatedAt: new Date().toISOString(), - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), // 24h - }) -} - -/** - * DELETE /api/sdk/v1/vendor-compliance/export/[reportId] - * - * Delete a generated report. - */ -export async function DELETE( - request: NextRequest, - { params }: { params: Promise<{ reportId: string }> } -) { - const { reportId } = await params - - // TODO: Delete report from storage and database - console.log('Deleting report:', reportId) - - return NextResponse.json({ - success: true, - deletedId: reportId, - }) -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/export/route.ts deleted file mode 100644 index fc2b3ab..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/export/route.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { v4 as uuidv4 } from 'uuid' - -/** - * POST /api/sdk/v1/vendor-compliance/export - * - * Generate and export reports in various formats. - * Currently returns mock data - integrate with actual report generation service. 
- */ - -interface ExportConfig { - reportType: 'VVT_EXPORT' | 'VENDOR_AUDIT' | 'ROPA' | 'MANAGEMENT_SUMMARY' | 'DPIA_INPUT' - format: 'PDF' | 'DOCX' | 'XLSX' | 'JSON' - scope: { - vendorIds: string[] - processingActivityIds: string[] - includeFindings: boolean - includeControls: boolean - includeRiskAssessment: boolean - dateRange?: { - from: string - to: string - } - } -} - -const REPORT_TYPE_NAMES: Record = { - VVT_EXPORT: 'Verarbeitungsverzeichnis', - VENDOR_AUDIT: 'Vendor-Audit-Pack', - ROPA: 'RoPA', - MANAGEMENT_SUMMARY: 'Management-Summary', - DPIA_INPUT: 'DSFA-Input', -} - -const FORMAT_EXTENSIONS: Record = { - PDF: 'pdf', - DOCX: 'docx', - XLSX: 'xlsx', - JSON: 'json', -} - -export async function POST(request: NextRequest) { - try { - const config = (await request.json()) as ExportConfig - - // Validate request - if (!config.reportType || !config.format) { - return NextResponse.json( - { error: 'reportType and format are required' }, - { status: 400 } - ) - } - - // Generate report ID and filename - const reportId = uuidv4() - const timestamp = new Date().toISOString().slice(0, 10).replace(/-/g, '') - const filename = `${REPORT_TYPE_NAMES[config.reportType]}_${timestamp}.${FORMAT_EXTENSIONS[config.format]}` - - // TODO: Implement actual report generation - // This would typically: - // 1. Fetch data from database based on scope - // 2. Generate report using template engine (e.g., docx-templates, pdfkit) - // 3. Store in MinIO/S3 - // 4. 
Return download URL - - // Mock implementation - simulate processing time - await new Promise((resolve) => setTimeout(resolve, 500)) - - // In production, this would be a signed URL to MinIO/S3 - const downloadUrl = `/api/sdk/v1/vendor-compliance/export/${reportId}/download` - - // Log export for audit trail - console.log('Export generated:', { - reportId, - reportType: config.reportType, - format: config.format, - scope: config.scope, - filename, - generatedAt: new Date().toISOString(), - }) - - return NextResponse.json({ - id: reportId, - reportType: config.reportType, - format: config.format, - filename, - downloadUrl, - generatedAt: new Date().toISOString(), - scope: { - vendorCount: config.scope.vendorIds?.length || 0, - activityCount: config.scope.processingActivityIds?.length || 0, - includesFindings: config.scope.includeFindings, - includesControls: config.scope.includeControls, - includesRiskAssessment: config.scope.includeRiskAssessment, - }, - }) - } catch (error) { - console.error('Export error:', error) - return NextResponse.json( - { error: 'Failed to generate export' }, - { status: 500 } - ) - } -} - -/** - * GET /api/sdk/v1/vendor-compliance/export - * - * List recent exports for the current tenant. 
- */ -export async function GET() { - // TODO: Implement fetching recent exports from database - // For now, return empty list - return NextResponse.json({ - exports: [], - totalCount: 0, - }) -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/findings/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/findings/route.ts deleted file mode 100644 index 2b94551..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/findings/route.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { Finding } from '@/lib/sdk/vendor-compliance' - -// In-memory storage for demo purposes -const findings: Map = new Map() - -export async function GET(request: NextRequest) { - try { - const searchParams = request.nextUrl.searchParams - const vendorId = searchParams.get('vendorId') - const contractId = searchParams.get('contractId') - const status = searchParams.get('status') - - let findingsList = Array.from(findings.values()) - - // Filter by vendor - if (vendorId) { - findingsList = findingsList.filter((f) => f.vendorId === vendorId) - } - - // Filter by contract - if (contractId) { - findingsList = findingsList.filter((f) => f.contractId === contractId) - } - - // Filter by status - if (status) { - findingsList = findingsList.filter((f) => f.status === status) - } - - return NextResponse.json({ - success: true, - data: findingsList, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching findings:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch findings' }, - { status: 500 } - ) - } -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/[id]/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/[id]/route.ts deleted file mode 100644 index eed1373..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/[id]/route.ts +++ /dev/null @@ -1,70 +0,0 
@@ -import { NextRequest, NextResponse } from 'next/server' - -// This would reference the same storage as the main route -// In production, this would be database calls - -export async function GET( - request: NextRequest, - { params }: { params: Promise<{ id: string }> } -) { - try { - const { id } = await params - - // In production, fetch from database - return NextResponse.json({ - success: true, - data: null, // Would return the activity - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching processing activity:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch processing activity' }, - { status: 500 } - ) - } -} - -export async function PUT( - request: NextRequest, - { params }: { params: Promise<{ id: string }> } -) { - try { - const { id } = await params - const body = await request.json() - - // In production, update in database - return NextResponse.json({ - success: true, - data: { id, ...body, updatedAt: new Date() }, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error updating processing activity:', error) - return NextResponse.json( - { success: false, error: 'Failed to update processing activity' }, - { status: 500 } - ) - } -} - -export async function DELETE( - request: NextRequest, - { params }: { params: Promise<{ id: string }> } -) { - try { - const { id } = await params - - // In production, delete from database - return NextResponse.json({ - success: true, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error deleting processing activity:', error) - return NextResponse.json( - { success: false, error: 'Failed to delete processing activity' }, - { status: 500 } - ) - } -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/route.ts deleted file mode 100644 index 32e01fe..0000000 --- 
a/admin-compliance/app/api/sdk/v1/vendor-compliance/processing-activities/route.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { v4 as uuidv4 } from 'uuid' -import { ProcessingActivity, generateVVTId } from '@/lib/sdk/vendor-compliance' - -// In-memory storage for demo purposes -// In production, this would be replaced with database calls -const processingActivities: Map = new Map() - -export async function GET(request: NextRequest) { - try { - const activities = Array.from(processingActivities.values()) - - return NextResponse.json({ - success: true, - data: activities, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching processing activities:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch processing activities' }, - { status: 500 } - ) - } -} - -export async function POST(request: NextRequest) { - try { - const body = await request.json() - - // Generate IDs - const id = uuidv4() - const existingIds = Array.from(processingActivities.values()).map((a) => a.vvtId) - const vvtId = body.vvtId || generateVVTId(existingIds) - - const activity: ProcessingActivity = { - id, - tenantId: 'default', // Would come from auth context - vvtId, - name: body.name, - responsible: body.responsible, - dpoContact: body.dpoContact, - purposes: body.purposes || [], - dataSubjectCategories: body.dataSubjectCategories || [], - personalDataCategories: body.personalDataCategories || [], - recipientCategories: body.recipientCategories || [], - thirdCountryTransfers: body.thirdCountryTransfers || [], - retentionPeriod: body.retentionPeriod || { description: { de: '', en: '' } }, - technicalMeasures: body.technicalMeasures || [], - legalBasis: body.legalBasis || [], - dataSources: body.dataSources || [], - systems: body.systems || [], - dataFlows: body.dataFlows || [], - protectionLevel: body.protectionLevel || 'MEDIUM', - dpiaRequired: body.dpiaRequired || false, 
- dpiaJustification: body.dpiaJustification, - subProcessors: body.subProcessors || [], - legalRetentionBasis: body.legalRetentionBasis, - status: body.status || 'DRAFT', - owner: body.owner || '', - lastReviewDate: body.lastReviewDate, - nextReviewDate: body.nextReviewDate, - createdAt: new Date(), - updatedAt: new Date(), - } - - processingActivities.set(id, activity) - - return NextResponse.json( - { - success: true, - data: activity, - timestamp: new Date().toISOString(), - }, - { status: 201 } - ) - } catch (error) { - console.error('Error creating processing activity:', error) - return NextResponse.json( - { success: false, error: 'Failed to create processing activity' }, - { status: 500 } - ) - } -} diff --git a/admin-compliance/app/api/sdk/v1/vendor-compliance/vendors/route.ts b/admin-compliance/app/api/sdk/v1/vendor-compliance/vendors/route.ts deleted file mode 100644 index 2e151dd..0000000 --- a/admin-compliance/app/api/sdk/v1/vendor-compliance/vendors/route.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { NextRequest, NextResponse } from 'next/server' -import { v4 as uuidv4 } from 'uuid' -import { Vendor } from '@/lib/sdk/vendor-compliance' - -// In-memory storage for demo purposes -const vendors: Map = new Map() - -export async function GET(request: NextRequest) { - try { - const vendorList = Array.from(vendors.values()) - - return NextResponse.json({ - success: true, - data: vendorList, - timestamp: new Date().toISOString(), - }) - } catch (error) { - console.error('Error fetching vendors:', error) - return NextResponse.json( - { success: false, error: 'Failed to fetch vendors' }, - { status: 500 } - ) - } -} - -export async function POST(request: NextRequest) { - try { - const body = await request.json() - const id = uuidv4() - - const vendor: Vendor = { - id, - tenantId: 'default', - name: body.name, - legalForm: body.legalForm, - country: body.country, - address: body.address, - website: body.website, - role: body.role, - serviceDescription: 
body.serviceDescription, - serviceCategory: body.serviceCategory, - dataAccessLevel: body.dataAccessLevel || 'NONE', - processingLocations: body.processingLocations || [], - transferMechanisms: body.transferMechanisms || [], - certifications: body.certifications || [], - primaryContact: body.primaryContact, - dpoContact: body.dpoContact, - securityContact: body.securityContact, - contractTypes: body.contractTypes || [], - contracts: body.contracts || [], - inherentRiskScore: body.inherentRiskScore || 50, - residualRiskScore: body.residualRiskScore || 50, - manualRiskAdjustment: body.manualRiskAdjustment, - riskJustification: body.riskJustification, - reviewFrequency: body.reviewFrequency || 'ANNUAL', - lastReviewDate: body.lastReviewDate, - nextReviewDate: body.nextReviewDate, - status: body.status || 'ACTIVE', - processingActivityIds: body.processingActivityIds || [], - notes: body.notes, - createdAt: new Date(), - updatedAt: new Date(), - } - - vendors.set(id, vendor) - - return NextResponse.json( - { - success: true, - data: vendor, - timestamp: new Date().toISOString(), - }, - { status: 201 } - ) - } catch (error) { - console.error('Error creating vendor:', error) - return NextResponse.json( - { success: false, error: 'Failed to create vendor' }, - { status: 500 } - ) - } -} diff --git a/ai-compliance-sdk/cmd/server/main.go b/ai-compliance-sdk/cmd/server/main.go index 476c843..2910b61 100644 --- a/ai-compliance-sdk/cmd/server/main.go +++ b/ai-compliance-sdk/cmd/server/main.go @@ -648,7 +648,9 @@ func main() { incidentRoutes.GET("/stats", incidentHandlers.GetStatistics) } - // Vendor Compliance routes - Vendor Management & AVV/DPA (DSGVO Art. 28) + // DEPRECATED: Vendor Compliance routes — Python backend is now Source of Truth. + // Frontend proxies to backend-compliance:8002/api/compliance/vendor-compliance/* + // These Go routes remain registered but should not be extended. 
vendorRoutes := v1.Group("/vendors") { // Vendor CRUD diff --git a/ai-compliance-sdk/docs/ARCHITECTURE.md b/ai-compliance-sdk/docs/ARCHITECTURE.md index 876c419..ae9a7f4 100644 --- a/ai-compliance-sdk/docs/ARCHITECTURE.md +++ b/ai-compliance-sdk/docs/ARCHITECTURE.md @@ -525,10 +525,10 @@ ai-compliance-sdk: environment: - DATABASE_URL=postgres://... - OLLAMA_URL=http://ollama:11434 - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=https://qdrant-dev.breakpilot.ai + - QDRANT_API_KEY=${QDRANT_API_KEY} depends_on: - postgres - - qdrant ``` ### 9.2 Abhängigkeiten diff --git a/ai-compliance-sdk/internal/api/handlers/vendor_handlers.go b/ai-compliance-sdk/internal/api/handlers/vendor_handlers.go index 98a3fd7..bae38ff 100644 --- a/ai-compliance-sdk/internal/api/handlers/vendor_handlers.go +++ b/ai-compliance-sdk/internal/api/handlers/vendor_handlers.go @@ -1,3 +1,8 @@ +// DEPRECATED: Vendor Compliance handlers are superseded by the Python backend +// (backend-compliance/compliance/api/vendor_compliance_routes.py). +// Frontend now routes through /api/sdk/v1/vendor-compliance → backend-compliance:8002. +// These Go handlers remain for backward compatibility but should not be extended. 
+ package handlers import ( diff --git a/backend-compliance/compliance/api/__init__.py b/backend-compliance/compliance/api/__init__.py index b2e593c..90c52ee 100644 --- a/backend-compliance/compliance/api/__init__.py +++ b/backend-compliance/compliance/api/__init__.py @@ -27,6 +27,7 @@ from .email_template_routes import router as email_template_router from .banner_routes import router as banner_router from .extraction_routes import router as extraction_router from .tom_routes import router as tom_router +from .vendor_compliance_routes import router as vendor_compliance_router # Include sub-routers router.include_router(audit_router) @@ -55,6 +56,7 @@ router.include_router(email_template_router) router.include_router(banner_router) router.include_router(extraction_router) router.include_router(tom_router) +router.include_router(vendor_compliance_router) __all__ = [ "router", @@ -83,4 +85,5 @@ __all__ = [ "email_template_router", "banner_router", "tom_router", + "vendor_compliance_router", ] diff --git a/backend-compliance/compliance/api/vendor_compliance_routes.py b/backend-compliance/compliance/api/vendor_compliance_routes.py new file mode 100644 index 0000000..4e6392d --- /dev/null +++ b/backend-compliance/compliance/api/vendor_compliance_routes.py @@ -0,0 +1,1107 @@ +""" +FastAPI routes for Vendor Compliance — Auftragsverarbeitung (Art. 28 DSGVO). 
+ +Endpoints: + Vendors (7): + GET /vendor-compliance/vendors — Liste + Filter + GET /vendor-compliance/vendors/stats — Statistiken + GET /vendor-compliance/vendors/{id} — Detail + POST /vendor-compliance/vendors — Erstellen + PUT /vendor-compliance/vendors/{id} — Update + DELETE /vendor-compliance/vendors/{id} — Loeschen + PATCH /vendor-compliance/vendors/{id}/status — Status aendern + + Contracts (5): + GET /vendor-compliance/contracts — Liste + GET /vendor-compliance/contracts/{id} — Detail + POST /vendor-compliance/contracts — Erstellen + PUT /vendor-compliance/contracts/{id} — Update + DELETE /vendor-compliance/contracts/{id} — Loeschen + + Findings (5): + GET /vendor-compliance/findings — Liste + GET /vendor-compliance/findings/{id} — Detail + POST /vendor-compliance/findings — Erstellen + PUT /vendor-compliance/findings/{id} — Update + DELETE /vendor-compliance/findings/{id} — Loeschen + + Control Instances (5): + GET /vendor-compliance/control-instances — Liste + GET /vendor-compliance/control-instances/{id} — Detail + POST /vendor-compliance/control-instances — Erstellen + PUT /vendor-compliance/control-instances/{id} — Update + DELETE /vendor-compliance/control-instances/{id} — Loeschen + + Controls Library (3): + GET /vendor-compliance/controls — Alle Controls + POST /vendor-compliance/controls — Erstellen + DELETE /vendor-compliance/controls/{id} — Loeschen + + Export Stubs (3): + POST /vendor-compliance/export — 501 + GET /vendor-compliance/export/{id} — 501 + GET /vendor-compliance/export/{id}/download — 501 + +DB tables (Go Migration 011, schema: vendor_vendors, vendor_contracts, +vendor_findings, vendor_control_instances). 
+""" + +import json +import logging +import uuid +from datetime import datetime +from typing import Optional, List + +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel +from sqlalchemy import text +from sqlalchemy.orm import Session + +from classroom_engine.database import get_db + +logger = logging.getLogger(__name__) +router = APIRouter(prefix="/vendor-compliance", tags=["vendor-compliance"]) + +DEFAULT_TENANT_ID = "default" + +# ============================================================================= +# Helpers +# ============================================================================= + +def _now_iso() -> str: + return datetime.utcnow().isoformat() + "Z" + + +def _ok(data, status_code: int = 200): + """Wrap response in {success, data, timestamp} envelope.""" + return {"success": True, "data": data, "timestamp": _now_iso()} + + +def _parse_json(val, default=None): + """Parse a JSONB/TEXT field → Python object.""" + if val is None: + return default if default is not None else None + if isinstance(val, (dict, list)): + return val + if isinstance(val, str): + try: + return json.loads(val) + except Exception: + return default if default is not None else val + return val + + +def _ts(val): + """Timestamp → ISO string or None.""" + if not val: + return None + if isinstance(val, str): + return val + return val.isoformat() + + +def _get(row, key, default=None): + """Safe row access.""" + try: + v = row[key] + return default if v is None and default is not None else v + except (KeyError, IndexError): + return default + + +# camelCase ↔ snake_case conversion maps +_VENDOR_CAMEL_TO_SNAKE = { + # Vendor fields + "legalForm": "legal_form", + "serviceDescription": "service_description", + "serviceCategory": "service_category", + "dataAccessLevel": "data_access_level", + "processingLocations": "processing_locations", + "transferMechanisms": "transfer_mechanisms", + "primaryContact": "primary_contact", + "dpoContact": 
"dpo_contact", + "securityContact": "security_contact", + "contractTypes": "contract_types", + "inherentRiskScore": "inherent_risk_score", + "residualRiskScore": "residual_risk_score", + "manualRiskAdjustment": "manual_risk_adjustment", + "riskJustification": "risk_justification", + "reviewFrequency": "review_frequency", + "lastReviewDate": "last_review_date", + "nextReviewDate": "next_review_date", + "processingActivityIds": "processing_activity_ids", + "contactName": "contact_name", + "contactEmail": "contact_email", + "contactPhone": "contact_phone", + "contactDepartment": "contact_department", + # Common / cross-entity fields + "tenantId": "tenant_id", + "createdAt": "created_at", + "updatedAt": "updated_at", + "createdBy": "created_by", + "vendorId": "vendor_id", + "contractId": "contract_id", + "controlId": "control_id", + "controlDomain": "control_domain", + "evidenceIds": "evidence_ids", + "lastAssessedAt": "last_assessed_at", + "lastAssessedBy": "last_assessed_by", + "nextAssessmentDate": "next_assessment_date", + # Contract fields + "fileName": "file_name", + "originalName": "original_name", + "mimeType": "mime_type", + "fileSize": "file_size", + "storagePath": "storage_path", + "documentType": "document_type", + "previousVersionId": "previous_version_id", + "effectiveDate": "effective_date", + "expirationDate": "expiration_date", + "autoRenewal": "auto_renewal", + "renewalNoticePeriod": "renewal_notice_period", + "terminationNoticePeriod": "termination_notice_period", + "reviewStatus": "review_status", + "reviewCompletedAt": "review_completed_at", + "complianceScore": "compliance_score", + "extractedText": "extracted_text", + "pageCount": "page_count", + # Finding fields + "findingType": "finding_type", + "dueDate": "due_date", + "resolvedAt": "resolved_at", + "resolvedBy": "resolved_by", +} + +_VENDOR_SNAKE_TO_CAMEL = {v: k for k, v in _VENDOR_CAMEL_TO_SNAKE.items()} + + +def _to_snake(data: dict) -> dict: + """Convert camelCase keys in data to 
snake_case for DB storage.""" + result = {} + for k, v in data.items(): + snake = _VENDOR_CAMEL_TO_SNAKE.get(k, k) + result[snake] = v + return result + + +def _to_camel(data: dict) -> dict: + """Convert snake_case keys to camelCase for frontend.""" + result = {} + for k, v in data.items(): + camel = _VENDOR_SNAKE_TO_CAMEL.get(k, k) + result[camel] = v + return result + + +# ============================================================================= +# Row → Response converters +# ============================================================================= + +def _vendor_to_response(row) -> dict: + return _to_camel({ + "id": str(row["id"]), + "tenant_id": row["tenant_id"], + "name": row["name"], + "legal_form": _get(row, "legal_form", ""), + "country": _get(row, "country", ""), + "address": _get(row, "address", ""), + "website": _get(row, "website", ""), + "role": _get(row, "role", "PROCESSOR"), + "service_description": _get(row, "service_description", ""), + "service_category": _get(row, "service_category", "OTHER"), + "data_access_level": _get(row, "data_access_level", "NONE"), + "processing_locations": _parse_json(_get(row, "processing_locations"), []), + "transfer_mechanisms": _parse_json(_get(row, "transfer_mechanisms"), []), + "certifications": _parse_json(_get(row, "certifications"), []), + "primary_contact": _parse_json(_get(row, "primary_contact"), {}), + "dpo_contact": _parse_json(_get(row, "dpo_contact"), {}), + "security_contact": _parse_json(_get(row, "security_contact"), {}), + "contract_types": _parse_json(_get(row, "contract_types"), []), + "inherent_risk_score": _get(row, "inherent_risk_score", 50), + "residual_risk_score": _get(row, "residual_risk_score", 50), + "manual_risk_adjustment": _get(row, "manual_risk_adjustment"), + "risk_justification": _get(row, "risk_justification", ""), + "review_frequency": _get(row, "review_frequency", "ANNUAL"), + "last_review_date": _ts(_get(row, "last_review_date")), + "next_review_date": _ts(_get(row, 
"next_review_date")), + "status": _get(row, "status", "ACTIVE"), + "processing_activity_ids": _parse_json(_get(row, "processing_activity_ids"), []), + "notes": _get(row, "notes", ""), + "contact_name": _get(row, "contact_name", ""), + "contact_email": _get(row, "contact_email", ""), + "contact_phone": _get(row, "contact_phone", ""), + "contact_department": _get(row, "contact_department", ""), + "created_at": _ts(row["created_at"]), + "updated_at": _ts(row["updated_at"]), + "created_by": _get(row, "created_by", "system"), + }) + + +def _contract_to_response(row) -> dict: + return _to_camel({ + "id": str(row["id"]), + "tenant_id": row["tenant_id"], + "vendor_id": str(row["vendor_id"]), + "file_name": _get(row, "file_name", ""), + "original_name": _get(row, "original_name", ""), + "mime_type": _get(row, "mime_type", ""), + "file_size": _get(row, "file_size", 0), + "storage_path": _get(row, "storage_path", ""), + "document_type": _get(row, "document_type", "AVV"), + "version": _get(row, "version", 1), + "previous_version_id": str(_get(row, "previous_version_id")) if _get(row, "previous_version_id") else None, + "parties": _parse_json(_get(row, "parties"), []), + "effective_date": _ts(_get(row, "effective_date")), + "expiration_date": _ts(_get(row, "expiration_date")), + "auto_renewal": _get(row, "auto_renewal", False), + "renewal_notice_period": _get(row, "renewal_notice_period", ""), + "termination_notice_period": _get(row, "termination_notice_period", ""), + "review_status": _get(row, "review_status", "PENDING"), + "review_completed_at": _ts(_get(row, "review_completed_at")), + "compliance_score": _get(row, "compliance_score"), + "status": _get(row, "status", "DRAFT"), + "extracted_text": _get(row, "extracted_text", ""), + "page_count": _get(row, "page_count", 0), + "created_at": _ts(row["created_at"]), + "updated_at": _ts(row["updated_at"]), + "created_by": _get(row, "created_by", "system"), + }) + + +def _finding_to_response(row) -> dict: + return _to_camel({ + 
"id": str(row["id"]), + "tenant_id": row["tenant_id"], + "vendor_id": str(row["vendor_id"]), + "contract_id": str(_get(row, "contract_id")) if _get(row, "contract_id") else None, + "finding_type": _get(row, "finding_type", "UNKNOWN"), + "category": _get(row, "category", ""), + "severity": _get(row, "severity", "MEDIUM"), + "title": _get(row, "title", ""), + "description": _get(row, "description", ""), + "recommendation": _get(row, "recommendation", ""), + "citations": _parse_json(_get(row, "citations"), []), + "status": _get(row, "status", "OPEN"), + "assignee": _get(row, "assignee", ""), + "due_date": _ts(_get(row, "due_date")), + "resolution": _get(row, "resolution", ""), + "resolved_at": _ts(_get(row, "resolved_at")), + "resolved_by": _get(row, "resolved_by", ""), + "created_at": _ts(row["created_at"]), + "updated_at": _ts(row["updated_at"]), + "created_by": _get(row, "created_by", "system"), + }) + + +def _control_instance_to_response(row) -> dict: + return _to_camel({ + "id": str(row["id"]), + "tenant_id": row["tenant_id"], + "vendor_id": str(row["vendor_id"]), + "control_id": _get(row, "control_id", ""), + "control_domain": _get(row, "control_domain", ""), + "status": _get(row, "status", "PLANNED"), + "evidence_ids": _parse_json(_get(row, "evidence_ids"), []), + "notes": _get(row, "notes", ""), + "last_assessed_at": _ts(_get(row, "last_assessed_at")), + "last_assessed_by": _get(row, "last_assessed_by", ""), + "next_assessment_date": _ts(_get(row, "next_assessment_date")), + "created_at": _ts(row["created_at"]), + "updated_at": _ts(row["updated_at"]), + "created_by": _get(row, "created_by", "system"), + }) + + +# ============================================================================= +# Vendors +# ============================================================================= + +@router.get("/vendors/stats") +def get_vendor_stats( + tenant_id: Optional[str] = Query(None), + db: Session = Depends(get_db), +): + tid = tenant_id or DEFAULT_TENANT_ID + result 
= db.execute(text("""
+        SELECT
+            COUNT(*) AS total,
+            COUNT(*) FILTER (WHERE status = 'ACTIVE') AS active,
+            COUNT(*) FILTER (WHERE status = 'INACTIVE') AS inactive,
+            COUNT(*) FILTER (WHERE status = 'PENDING_REVIEW') AS pending_review,
+            COUNT(*) FILTER (WHERE status = 'TERMINATED') AS terminated,
+            COALESCE(AVG(inherent_risk_score), 0) AS avg_inherent_risk,
+            COALESCE(AVG(residual_risk_score), 0) AS avg_residual_risk,
+            COUNT(*) FILTER (WHERE inherent_risk_score >= 75) AS high_risk_count
+        FROM vendor_vendors
+        WHERE tenant_id = :tid
+    """), {"tid": tid})
+    row = result.fetchone()
+    if row is None:
+        # Fallback shape MUST use the same camelCase keys as the populated
+        # branch below, so the frontend always sees a stable response schema
+        # (previously this branch returned snake_case keys).
+        stats = {
+            "total": 0, "active": 0, "inactive": 0,
+            "pendingReview": 0, "terminated": 0,
+            "avgInherentRisk": 0, "avgResidualRisk": 0,
+            "highRiskCount": 0,
+        }
+    else:
+        stats = {
+            "total": row["total"] or 0,
+            "active": row["active"] or 0,
+            "inactive": row["inactive"] or 0,
+            "pendingReview": row["pending_review"] or 0,
+            "terminated": row["terminated"] or 0,
+            "avgInherentRisk": round(float(row["avg_inherent_risk"] or 0), 1),
+            "avgResidualRisk": round(float(row["avg_residual_risk"] or 0), 1),
+            "highRiskCount": row["high_risk_count"] or 0,
+        }
+    return _ok(stats)
+
+
+@router.get("/vendors")
+def list_vendors(
+    tenant_id: Optional[str] = Query(None),
+    status: Optional[str] = Query(None),
+    risk_level: Optional[str] = Query(None, alias="riskLevel"),
+    search: Optional[str] = Query(None),
+    skip: int = Query(0, ge=0),
+    limit: int = Query(100, ge=1, le=500),
+    db: Session = Depends(get_db),
+):
+    tid = tenant_id or DEFAULT_TENANT_ID
+    where = ["tenant_id = :tid"]
+    params: dict = {"tid": tid}
+
+    if status:
+        where.append("status = :status")
+        params["status"] = status
+    if risk_level:
+        if risk_level == "HIGH":
+            where.append("inherent_risk_score >= 75")
+        elif risk_level == "MEDIUM":
+            where.append("inherent_risk_score >= 40 AND inherent_risk_score < 75")
+        elif risk_level == "LOW":
+            where.append("inherent_risk_score < 40")
+    if search:
+        
where.append("(name ILIKE :search OR service_description ILIKE :search)") + params["search"] = f"%{search}%" + + where_clause = " AND ".join(where) + params["lim"] = limit + params["off"] = skip + + rows = db.execute(text(f""" + SELECT * FROM vendor_vendors + WHERE {where_clause} + ORDER BY created_at DESC + LIMIT :lim OFFSET :off + """), params).fetchall() + + count_row = db.execute(text(f""" + SELECT COUNT(*) AS cnt FROM vendor_vendors WHERE {where_clause} + """), {k: v for k, v in params.items() if k not in ("lim", "off")}).fetchone() + total = count_row["cnt"] if count_row else 0 + + return _ok({"items": [_vendor_to_response(r) for r in rows], "total": total}) + + +@router.get("/vendors/{vendor_id}") +def get_vendor(vendor_id: str, db: Session = Depends(get_db)): + row = db.execute(text("SELECT * FROM vendor_vendors WHERE id = :id"), + {"id": vendor_id}).fetchone() + if not row: + raise HTTPException(404, "Vendor not found") + return _ok(_vendor_to_response(row)) + + +@router.post("/vendors", status_code=201) +def create_vendor(body: dict = {}, db: Session = Depends(get_db)): + data = _to_snake(body) + vid = str(uuid.uuid4()) + tid = data.get("tenant_id", DEFAULT_TENANT_ID) + now = datetime.utcnow().isoformat() + + db.execute(text(""" + INSERT INTO vendor_vendors ( + id, tenant_id, name, legal_form, country, address, website, + role, service_description, service_category, data_access_level, + processing_locations, transfer_mechanisms, certifications, + primary_contact, dpo_contact, security_contact, + contract_types, inherent_risk_score, residual_risk_score, + manual_risk_adjustment, risk_justification, + review_frequency, last_review_date, next_review_date, + status, processing_activity_ids, notes, + contact_name, contact_email, contact_phone, contact_department, + created_at, updated_at, created_by + ) VALUES ( + :id, :tenant_id, :name, :legal_form, :country, :address, :website, + :role, :service_description, :service_category, :data_access_level, + 
CAST(:processing_locations AS jsonb), CAST(:transfer_mechanisms AS jsonb), + CAST(:certifications AS jsonb), + CAST(:primary_contact AS jsonb), CAST(:dpo_contact AS jsonb), + CAST(:security_contact AS jsonb), + CAST(:contract_types AS jsonb), :inherent_risk_score, :residual_risk_score, + :manual_risk_adjustment, :risk_justification, + :review_frequency, :last_review_date, :next_review_date, + :status, CAST(:processing_activity_ids AS jsonb), :notes, + :contact_name, :contact_email, :contact_phone, :contact_department, + :created_at, :updated_at, :created_by + ) + """), { + "id": vid, + "tenant_id": tid, + "name": data.get("name", ""), + "legal_form": data.get("legal_form", ""), + "country": data.get("country", ""), + "address": data.get("address", ""), + "website": data.get("website", ""), + "role": data.get("role", "PROCESSOR"), + "service_description": data.get("service_description", ""), + "service_category": data.get("service_category", "OTHER"), + "data_access_level": data.get("data_access_level", "NONE"), + "processing_locations": json.dumps(data.get("processing_locations", [])), + "transfer_mechanisms": json.dumps(data.get("transfer_mechanisms", [])), + "certifications": json.dumps(data.get("certifications", [])), + "primary_contact": json.dumps(data.get("primary_contact", {})), + "dpo_contact": json.dumps(data.get("dpo_contact", {})), + "security_contact": json.dumps(data.get("security_contact", {})), + "contract_types": json.dumps(data.get("contract_types", [])), + "inherent_risk_score": data.get("inherent_risk_score", 50), + "residual_risk_score": data.get("residual_risk_score", 50), + "manual_risk_adjustment": data.get("manual_risk_adjustment"), + "risk_justification": data.get("risk_justification", ""), + "review_frequency": data.get("review_frequency", "ANNUAL"), + "last_review_date": data.get("last_review_date"), + "next_review_date": data.get("next_review_date"), + "status": data.get("status", "ACTIVE"), + "processing_activity_ids": 
json.dumps(data.get("processing_activity_ids", [])), + "notes": data.get("notes", ""), + "contact_name": data.get("contact_name", ""), + "contact_email": data.get("contact_email", ""), + "contact_phone": data.get("contact_phone", ""), + "contact_department": data.get("contact_department", ""), + "created_at": now, + "updated_at": now, + "created_by": data.get("created_by", "system"), + }) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_vendors WHERE id = :id"), + {"id": vid}).fetchone() + return _ok(_vendor_to_response(row)) + + +@router.put("/vendors/{vendor_id}") +def update_vendor(vendor_id: str, body: dict = {}, db: Session = Depends(get_db)): + existing = db.execute(text("SELECT id FROM vendor_vendors WHERE id = :id"), + {"id": vendor_id}).fetchone() + if not existing: + raise HTTPException(404, "Vendor not found") + + data = _to_snake(body) + now = datetime.utcnow().isoformat() + + # Build dynamic SET clause + allowed = [ + "name", "legal_form", "country", "address", "website", + "role", "service_description", "service_category", "data_access_level", + "inherent_risk_score", "residual_risk_score", + "manual_risk_adjustment", "risk_justification", + "review_frequency", "last_review_date", "next_review_date", + "status", "notes", + "contact_name", "contact_email", "contact_phone", "contact_department", + ] + jsonb_fields = [ + "processing_locations", "transfer_mechanisms", "certifications", + "primary_contact", "dpo_contact", "security_contact", + "contract_types", "processing_activity_ids", + ] + + sets = ["updated_at = :updated_at"] + params: dict = {"id": vendor_id, "updated_at": now} + + for col in allowed: + if col in data: + sets.append(f"{col} = :{col}") + params[col] = data[col] + + for col in jsonb_fields: + if col in data: + sets.append(f"{col} = CAST(:{col} AS jsonb)") + params[col] = json.dumps(data[col]) + + db.execute(text(f"UPDATE vendor_vendors SET {', '.join(sets)} WHERE id = :id"), params) + db.commit() + + row = 
db.execute(text("SELECT * FROM vendor_vendors WHERE id = :id"), + {"id": vendor_id}).fetchone() + return _ok(_vendor_to_response(row)) + + +@router.delete("/vendors/{vendor_id}") +def delete_vendor(vendor_id: str, db: Session = Depends(get_db)): + result = db.execute(text("DELETE FROM vendor_vendors WHERE id = :id"), + {"id": vendor_id}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Vendor not found") + return _ok({"deleted": True}) + + +@router.patch("/vendors/{vendor_id}/status") +def patch_vendor_status(vendor_id: str, body: dict = {}, db: Session = Depends(get_db)): + new_status = body.get("status") + if not new_status: + raise HTTPException(400, "status is required") + valid = {"ACTIVE", "INACTIVE", "PENDING_REVIEW", "TERMINATED"} + if new_status not in valid: + raise HTTPException(400, f"Invalid status. Must be one of: {', '.join(sorted(valid))}") + + result = db.execute(text(""" + UPDATE vendor_vendors SET status = :status, updated_at = :now WHERE id = :id + """), {"id": vendor_id, "status": new_status, "now": datetime.utcnow().isoformat()}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Vendor not found") + + row = db.execute(text("SELECT * FROM vendor_vendors WHERE id = :id"), + {"id": vendor_id}).fetchone() + return _ok(_vendor_to_response(row)) + + +# ============================================================================= +# Contracts +# ============================================================================= + +@router.get("/contracts") +def list_contracts( + tenant_id: Optional[str] = Query(None), + vendor_id: Optional[str] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=500), + db: Session = Depends(get_db), +): + tid = tenant_id or DEFAULT_TENANT_ID + where = ["tenant_id = :tid"] + params: dict = {"tid": tid} + + if vendor_id: + where.append("vendor_id = :vendor_id") + params["vendor_id"] = vendor_id + if status: + 
where.append("status = :status") + params["status"] = status + + where_clause = " AND ".join(where) + params["lim"] = limit + params["off"] = skip + + rows = db.execute(text(f""" + SELECT * FROM vendor_contracts + WHERE {where_clause} + ORDER BY created_at DESC + LIMIT :lim OFFSET :off + """), params).fetchall() + + return _ok([_contract_to_response(r) for r in rows]) + + +@router.get("/contracts/{contract_id}") +def get_contract(contract_id: str, db: Session = Depends(get_db)): + row = db.execute(text("SELECT * FROM vendor_contracts WHERE id = :id"), + {"id": contract_id}).fetchone() + if not row: + raise HTTPException(404, "Contract not found") + return _ok(_contract_to_response(row)) + + +@router.post("/contracts", status_code=201) +def create_contract(body: dict = {}, db: Session = Depends(get_db)): + data = _to_snake(body) + cid = str(uuid.uuid4()) + tid = data.get("tenant_id", DEFAULT_TENANT_ID) + now = datetime.utcnow().isoformat() + + db.execute(text(""" + INSERT INTO vendor_contracts ( + id, tenant_id, vendor_id, file_name, original_name, mime_type, + file_size, storage_path, document_type, version, previous_version_id, + parties, effective_date, expiration_date, + auto_renewal, renewal_notice_period, termination_notice_period, + review_status, status, compliance_score, + extracted_text, page_count, + created_at, updated_at, created_by + ) VALUES ( + :id, :tenant_id, :vendor_id, :file_name, :original_name, :mime_type, + :file_size, :storage_path, :document_type, :version, :previous_version_id, + CAST(:parties AS jsonb), :effective_date, :expiration_date, + :auto_renewal, :renewal_notice_period, :termination_notice_period, + :review_status, :status, :compliance_score, + :extracted_text, :page_count, + :created_at, :updated_at, :created_by + ) + """), { + "id": cid, + "tenant_id": tid, + "vendor_id": data.get("vendor_id", ""), + "file_name": data.get("file_name", ""), + "original_name": data.get("original_name", ""), + "mime_type": data.get("mime_type", ""), 
+ "file_size": data.get("file_size", 0), + "storage_path": data.get("storage_path", ""), + "document_type": data.get("document_type", "AVV"), + "version": data.get("version", 1), + "previous_version_id": data.get("previous_version_id"), + "parties": json.dumps(data.get("parties", [])), + "effective_date": data.get("effective_date"), + "expiration_date": data.get("expiration_date"), + "auto_renewal": data.get("auto_renewal", False), + "renewal_notice_period": data.get("renewal_notice_period", ""), + "termination_notice_period": data.get("termination_notice_period", ""), + "review_status": data.get("review_status", "PENDING"), + "status": data.get("status", "DRAFT"), + "compliance_score": data.get("compliance_score"), + "extracted_text": data.get("extracted_text", ""), + "page_count": data.get("page_count", 0), + "created_at": now, + "updated_at": now, + "created_by": data.get("created_by", "system"), + }) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_contracts WHERE id = :id"), + {"id": cid}).fetchone() + return _ok(_contract_to_response(row)) + + +@router.put("/contracts/{contract_id}") +def update_contract(contract_id: str, body: dict = {}, db: Session = Depends(get_db)): + existing = db.execute(text("SELECT id FROM vendor_contracts WHERE id = :id"), + {"id": contract_id}).fetchone() + if not existing: + raise HTTPException(404, "Contract not found") + + data = _to_snake(body) + now = datetime.utcnow().isoformat() + + allowed = [ + "vendor_id", "file_name", "original_name", "mime_type", "file_size", + "storage_path", "document_type", "version", "previous_version_id", + "effective_date", "expiration_date", "auto_renewal", + "renewal_notice_period", "termination_notice_period", + "review_status", "review_completed_at", "compliance_score", + "status", "extracted_text", "page_count", + ] + jsonb_fields = ["parties"] + + sets = ["updated_at = :updated_at"] + params: dict = {"id": contract_id, "updated_at": now} + + for col in allowed: + if col in data: 
+ sets.append(f"{col} = :{col}") + params[col] = data[col] + + for col in jsonb_fields: + if col in data: + sets.append(f"{col} = CAST(:{col} AS jsonb)") + params[col] = json.dumps(data[col]) + + db.execute(text(f"UPDATE vendor_contracts SET {', '.join(sets)} WHERE id = :id"), params) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_contracts WHERE id = :id"), + {"id": contract_id}).fetchone() + return _ok(_contract_to_response(row)) + + +@router.delete("/contracts/{contract_id}") +def delete_contract(contract_id: str, db: Session = Depends(get_db)): + result = db.execute(text("DELETE FROM vendor_contracts WHERE id = :id"), + {"id": contract_id}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Contract not found") + return _ok({"deleted": True}) + + +# ============================================================================= +# Findings +# ============================================================================= + +@router.get("/findings") +def list_findings( + tenant_id: Optional[str] = Query(None), + vendor_id: Optional[str] = Query(None), + severity: Optional[str] = Query(None), + status: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=500), + db: Session = Depends(get_db), +): + tid = tenant_id or DEFAULT_TENANT_ID + where = ["tenant_id = :tid"] + params: dict = {"tid": tid} + + if vendor_id: + where.append("vendor_id = :vendor_id") + params["vendor_id"] = vendor_id + if severity: + where.append("severity = :severity") + params["severity"] = severity + if status: + where.append("status = :status") + params["status"] = status + + where_clause = " AND ".join(where) + params["lim"] = limit + params["off"] = skip + + rows = db.execute(text(f""" + SELECT * FROM vendor_findings + WHERE {where_clause} + ORDER BY created_at DESC + LIMIT :lim OFFSET :off + """), params).fetchall() + + return _ok([_finding_to_response(r) for r in rows]) + + +@router.get("/findings/{finding_id}") +def 
get_finding(finding_id: str, db: Session = Depends(get_db)): + row = db.execute(text("SELECT * FROM vendor_findings WHERE id = :id"), + {"id": finding_id}).fetchone() + if not row: + raise HTTPException(404, "Finding not found") + return _ok(_finding_to_response(row)) + + +@router.post("/findings", status_code=201) +def create_finding(body: dict = {}, db: Session = Depends(get_db)): + data = _to_snake(body) + fid = str(uuid.uuid4()) + tid = data.get("tenant_id", DEFAULT_TENANT_ID) + now = datetime.utcnow().isoformat() + + db.execute(text(""" + INSERT INTO vendor_findings ( + id, tenant_id, vendor_id, contract_id, + finding_type, category, severity, + title, description, recommendation, + citations, status, assignee, due_date, + created_at, updated_at, created_by + ) VALUES ( + :id, :tenant_id, :vendor_id, :contract_id, + :finding_type, :category, :severity, + :title, :description, :recommendation, + CAST(:citations AS jsonb), :status, :assignee, :due_date, + :created_at, :updated_at, :created_by + ) + """), { + "id": fid, + "tenant_id": tid, + "vendor_id": data.get("vendor_id", ""), + "contract_id": data.get("contract_id"), + "finding_type": data.get("finding_type", "UNKNOWN"), + "category": data.get("category", ""), + "severity": data.get("severity", "MEDIUM"), + "title": data.get("title", ""), + "description": data.get("description", ""), + "recommendation": data.get("recommendation", ""), + "citations": json.dumps(data.get("citations", [])), + "status": data.get("status", "OPEN"), + "assignee": data.get("assignee", ""), + "due_date": data.get("due_date"), + "created_at": now, + "updated_at": now, + "created_by": data.get("created_by", "system"), + }) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_findings WHERE id = :id"), + {"id": fid}).fetchone() + return _ok(_finding_to_response(row)) + + +@router.put("/findings/{finding_id}") +def update_finding(finding_id: str, body: dict = {}, db: Session = Depends(get_db)): + existing = 
db.execute(text("SELECT id FROM vendor_findings WHERE id = :id"), + {"id": finding_id}).fetchone() + if not existing: + raise HTTPException(404, "Finding not found") + + data = _to_snake(body) + now = datetime.utcnow().isoformat() + + allowed = [ + "vendor_id", "contract_id", "finding_type", "category", "severity", + "title", "description", "recommendation", + "status", "assignee", "due_date", + "resolution", "resolved_at", "resolved_by", + ] + jsonb_fields = ["citations"] + + sets = ["updated_at = :updated_at"] + params: dict = {"id": finding_id, "updated_at": now} + + for col in allowed: + if col in data: + sets.append(f"{col} = :{col}") + params[col] = data[col] + + for col in jsonb_fields: + if col in data: + sets.append(f"{col} = CAST(:{col} AS jsonb)") + params[col] = json.dumps(data[col]) + + db.execute(text(f"UPDATE vendor_findings SET {', '.join(sets)} WHERE id = :id"), params) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_findings WHERE id = :id"), + {"id": finding_id}).fetchone() + return _ok(_finding_to_response(row)) + + +@router.delete("/findings/{finding_id}") +def delete_finding(finding_id: str, db: Session = Depends(get_db)): + result = db.execute(text("DELETE FROM vendor_findings WHERE id = :id"), + {"id": finding_id}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Finding not found") + return _ok({"deleted": True}) + + +# ============================================================================= +# Control Instances +# ============================================================================= + +@router.get("/control-instances") +def list_control_instances( + tenant_id: Optional[str] = Query(None), + vendor_id: Optional[str] = Query(None), + skip: int = Query(0, ge=0), + limit: int = Query(100, ge=1, le=500), + db: Session = Depends(get_db), +): + tid = tenant_id or DEFAULT_TENANT_ID + where = ["tenant_id = :tid"] + params: dict = {"tid": tid} + + if vendor_id: + where.append("vendor_id = :vendor_id") + 
params["vendor_id"] = vendor_id + + where_clause = " AND ".join(where) + params["lim"] = limit + params["off"] = skip + + rows = db.execute(text(f""" + SELECT * FROM vendor_control_instances + WHERE {where_clause} + ORDER BY created_at DESC + LIMIT :lim OFFSET :off + """), params).fetchall() + + return _ok([_control_instance_to_response(r) for r in rows]) + + +@router.get("/control-instances/{instance_id}") +def get_control_instance(instance_id: str, db: Session = Depends(get_db)): + row = db.execute(text("SELECT * FROM vendor_control_instances WHERE id = :id"), + {"id": instance_id}).fetchone() + if not row: + raise HTTPException(404, "Control instance not found") + return _ok(_control_instance_to_response(row)) + + +@router.post("/control-instances", status_code=201) +def create_control_instance(body: dict = {}, db: Session = Depends(get_db)): + data = _to_snake(body) + ciid = str(uuid.uuid4()) + tid = data.get("tenant_id", DEFAULT_TENANT_ID) + now = datetime.utcnow().isoformat() + + db.execute(text(""" + INSERT INTO vendor_control_instances ( + id, tenant_id, vendor_id, control_id, control_domain, + status, evidence_ids, notes, + last_assessed_at, last_assessed_by, next_assessment_date, + created_at, updated_at, created_by + ) VALUES ( + :id, :tenant_id, :vendor_id, :control_id, :control_domain, + :status, CAST(:evidence_ids AS jsonb), :notes, + :last_assessed_at, :last_assessed_by, :next_assessment_date, + :created_at, :updated_at, :created_by + ) + """), { + "id": ciid, + "tenant_id": tid, + "vendor_id": data.get("vendor_id", ""), + "control_id": data.get("control_id", ""), + "control_domain": data.get("control_domain", ""), + "status": data.get("status", "PLANNED"), + "evidence_ids": json.dumps(data.get("evidence_ids", [])), + "notes": data.get("notes", ""), + "last_assessed_at": data.get("last_assessed_at"), + "last_assessed_by": data.get("last_assessed_by", ""), + "next_assessment_date": data.get("next_assessment_date"), + "created_at": now, + "updated_at": 
now, + "created_by": data.get("created_by", "system"), + }) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_control_instances WHERE id = :id"), + {"id": ciid}).fetchone() + return _ok(_control_instance_to_response(row)) + + +@router.put("/control-instances/{instance_id}") +def update_control_instance(instance_id: str, body: dict = {}, db: Session = Depends(get_db)): + existing = db.execute(text("SELECT id FROM vendor_control_instances WHERE id = :id"), + {"id": instance_id}).fetchone() + if not existing: + raise HTTPException(404, "Control instance not found") + + data = _to_snake(body) + now = datetime.utcnow().isoformat() + + allowed = [ + "vendor_id", "control_id", "control_domain", + "status", "notes", + "last_assessed_at", "last_assessed_by", "next_assessment_date", + ] + jsonb_fields = ["evidence_ids"] + + sets = ["updated_at = :updated_at"] + params: dict = {"id": instance_id, "updated_at": now} + + for col in allowed: + if col in data: + sets.append(f"{col} = :{col}") + params[col] = data[col] + + for col in jsonb_fields: + if col in data: + sets.append(f"{col} = CAST(:{col} AS jsonb)") + params[col] = json.dumps(data[col]) + + db.execute(text(f"UPDATE vendor_control_instances SET {', '.join(sets)} WHERE id = :id"), params) + db.commit() + + row = db.execute(text("SELECT * FROM vendor_control_instances WHERE id = :id"), + {"id": instance_id}).fetchone() + return _ok(_control_instance_to_response(row)) + + +@router.delete("/control-instances/{instance_id}") +def delete_control_instance(instance_id: str, db: Session = Depends(get_db)): + result = db.execute(text("DELETE FROM vendor_control_instances WHERE id = :id"), + {"id": instance_id}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Control instance not found") + return _ok({"deleted": True}) + + +# ============================================================================= +# Controls Library (vendor_compliance_controls — lightweight catalog) +# 
============================================================================= + +@router.get("/controls") +def list_controls( + tenant_id: Optional[str] = Query(None), + domain: Optional[str] = Query(None), + db: Session = Depends(get_db), +): + tid = tenant_id or DEFAULT_TENANT_ID + where = ["tenant_id = :tid"] + params: dict = {"tid": tid} + + if domain: + where.append("domain = :domain") + params["domain"] = domain + + where_clause = " AND ".join(where) + + rows = db.execute(text(f""" + SELECT * FROM vendor_compliance_controls + WHERE {where_clause} + ORDER BY domain, control_code + """), params).fetchall() + + items = [] + for r in rows: + items.append({ + "id": str(r["id"]), + "tenantId": r["tenant_id"], + "domain": _get(r, "domain", ""), + "controlCode": _get(r, "control_code", ""), + "title": _get(r, "title", ""), + "description": _get(r, "description", ""), + "createdAt": _ts(r["created_at"]), + }) + + return _ok(items) + + +@router.post("/controls", status_code=201) +def create_control(body: dict = {}, db: Session = Depends(get_db)): + cid = str(uuid.uuid4()) + tid = body.get("tenantId", body.get("tenant_id", DEFAULT_TENANT_ID)) + now = datetime.utcnow().isoformat() + + db.execute(text(""" + INSERT INTO vendor_compliance_controls ( + id, tenant_id, domain, control_code, title, description, created_at + ) VALUES (:id, :tenant_id, :domain, :control_code, :title, :description, :created_at) + """), { + "id": cid, + "tenant_id": tid, + "domain": body.get("domain", ""), + "control_code": body.get("controlCode", body.get("control_code", "")), + "title": body.get("title", ""), + "description": body.get("description", ""), + "created_at": now, + }) + db.commit() + + return _ok({ + "id": cid, + "tenantId": tid, + "domain": body.get("domain", ""), + "controlCode": body.get("controlCode", body.get("control_code", "")), + "title": body.get("title", ""), + "description": body.get("description", ""), + "createdAt": now, + }) + + +@router.delete("/controls/{control_id}") 
+def delete_control(control_id: str, db: Session = Depends(get_db)): + result = db.execute(text("DELETE FROM vendor_compliance_controls WHERE id = :id"), + {"id": control_id}) + db.commit() + if result.rowcount == 0: + raise HTTPException(404, "Control not found") + return _ok({"deleted": True}) + + +# ============================================================================= +# Export Stubs (501 Not Implemented) +# ============================================================================= + +@router.post("/export", status_code=501) +def export_report(): + return {"success": False, "error": "Export not implemented yet", "timestamp": _now_iso()} + + +@router.get("/export/{report_id}", status_code=501) +def get_export(report_id: str): + return {"success": False, "error": "Export not implemented yet", "timestamp": _now_iso()} + + +@router.get("/export/{report_id}/download", status_code=501) +def download_export(report_id: str): + return {"success": False, "error": "Export not implemented yet", "timestamp": _now_iso()} diff --git a/backend-compliance/tests/test_vendor_compliance_routes.py b/backend-compliance/tests/test_vendor_compliance_routes.py new file mode 100644 index 0000000..5d8b327 --- /dev/null +++ b/backend-compliance/tests/test_vendor_compliance_routes.py @@ -0,0 +1,724 @@ +"""Tests for Vendor Compliance routes (vendor_compliance_routes.py). 
+ +Includes: + - Vendors: CRUD (5) + Stats (1) + Status-Patch (1) + Filter (2) + - Contracts: CRUD (5) + Filter (1) + - Findings: CRUD (5) + Filter (2) + - Control Instances: CRUD (5) + Filter (1) + - Controls Library: List + Create + Delete (3) + - Export Stubs: 3 × 501 + - Response-Format: success/data/timestamp wrapper (2) + - camelCase/snake_case round-trip (2) +""" + +import pytest +import uuid +import os +import sys +from datetime import datetime + +from fastapi import FastAPI +from fastapi.testclient import TestClient +from sqlalchemy import create_engine, text, event +from sqlalchemy.orm import sessionmaker + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) + +from classroom_engine.database import get_db +from compliance.api.vendor_compliance_routes import router as vendor_compliance_router + +# ============================================================================= +# Test App + SQLite Setup +# ============================================================================= + +SQLALCHEMY_DATABASE_URL = "sqlite:///./test_vendor_compliance.db" +engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}) +_RawSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +TENANT_ID = "default" + + +@event.listens_for(engine, "connect") +def _register_sqlite_functions(dbapi_conn, connection_record): + dbapi_conn.create_function("NOW", 0, lambda: datetime.utcnow().isoformat()) + + +class _DictRow(dict): + pass + + +class _DictSession: + def __init__(self, session): + self._session = session + + def execute(self, stmt, params=None): + import re + if hasattr(stmt, 'text'): + rewritten = re.sub(r'CAST\((:[\w]+)\s+AS\s+jsonb\)', r'\1', stmt.text) + # Remove FILTER (WHERE ...) 
for SQLite — replace with CASE/SUM + # Simple approach: rewrite COUNT(*) FILTER (WHERE cond) → SUM(CASE WHEN cond THEN 1 ELSE 0 END) + filter_re = r'COUNT\(\*\)\s+FILTER\s*\(\s*WHERE\s+([^)]+)\)' + rewritten = re.sub(filter_re, r'SUM(CASE WHEN \1 THEN 1 ELSE 0 END)', rewritten) + # ILIKE → LIKE for SQLite + rewritten = rewritten.replace(' ILIKE ', ' LIKE ') + if rewritten != stmt.text: + stmt = text(rewritten) + result = self._session.execute(stmt, params) + return _DictResult(result) + + def flush(self): + self._session.flush() + + def commit(self): + self._session.commit() + + def rollback(self): + self._session.rollback() + + def close(self): + self._session.close() + + +class _DictResult: + def __init__(self, result): + self._result = result + try: + self._keys = list(result.keys()) + self._returns_rows = True + except Exception: + self._keys = [] + self._returns_rows = False + + def fetchone(self): + if not self._returns_rows: + return None + row = self._result.fetchone() + if row is None: + return None + return _DictRow(zip(self._keys, row)) + + def fetchall(self): + if not self._returns_rows: + return [] + rows = self._result.fetchall() + return [_DictRow(zip(self._keys, r)) for r in rows] + + @property + def rowcount(self): + return self._result.rowcount + + +app = FastAPI() +app.include_router(vendor_compliance_router, prefix="/api/compliance") + + +def override_get_db(): + session = _RawSessionLocal() + db = _DictSession(session) + try: + yield db + finally: + db.close() + + +app.dependency_overrides[get_db] = override_get_db +client = TestClient(app) + + +# ============================================================================= +# SQLite Table Creation +# ============================================================================= + +CREATE_VENDORS = """ +CREATE TABLE IF NOT EXISTS vendor_vendors ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL DEFAULT 'default', + name TEXT NOT NULL DEFAULT '', + legal_form TEXT DEFAULT '', + country TEXT 
DEFAULT '', + address TEXT DEFAULT '', + website TEXT DEFAULT '', + role TEXT DEFAULT 'PROCESSOR', + service_description TEXT DEFAULT '', + service_category TEXT DEFAULT 'OTHER', + data_access_level TEXT DEFAULT 'NONE', + processing_locations TEXT DEFAULT '[]', + transfer_mechanisms TEXT DEFAULT '[]', + certifications TEXT DEFAULT '[]', + primary_contact TEXT DEFAULT '{}', + dpo_contact TEXT DEFAULT '{}', + security_contact TEXT DEFAULT '{}', + contract_types TEXT DEFAULT '[]', + inherent_risk_score INTEGER DEFAULT 50, + residual_risk_score INTEGER DEFAULT 50, + manual_risk_adjustment INTEGER, + risk_justification TEXT DEFAULT '', + review_frequency TEXT DEFAULT 'ANNUAL', + last_review_date TIMESTAMP, + next_review_date TIMESTAMP, + status TEXT DEFAULT 'ACTIVE', + processing_activity_ids TEXT DEFAULT '[]', + notes TEXT DEFAULT '', + contact_name TEXT DEFAULT '', + contact_email TEXT DEFAULT '', + contact_phone TEXT DEFAULT '', + contact_department TEXT DEFAULT '', + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + created_by TEXT DEFAULT 'system' +) +""" + +CREATE_CONTRACTS = """ +CREATE TABLE IF NOT EXISTS vendor_contracts ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL DEFAULT 'default', + vendor_id TEXT NOT NULL DEFAULT '', + file_name TEXT DEFAULT '', + original_name TEXT DEFAULT '', + mime_type TEXT DEFAULT '', + file_size INTEGER DEFAULT 0, + storage_path TEXT DEFAULT '', + document_type TEXT DEFAULT 'AVV', + version INTEGER DEFAULT 1, + previous_version_id TEXT, + parties TEXT DEFAULT '[]', + effective_date TIMESTAMP, + expiration_date TIMESTAMP, + auto_renewal INTEGER DEFAULT 0, + renewal_notice_period TEXT DEFAULT '', + termination_notice_period TEXT DEFAULT '', + review_status TEXT DEFAULT 'PENDING', + review_completed_at TIMESTAMP, + compliance_score INTEGER, + status TEXT DEFAULT 'DRAFT', + extracted_text TEXT DEFAULT '', + page_count INTEGER DEFAULT 0, + created_at TIMESTAMP DEFAULT 
CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + created_by TEXT DEFAULT 'system' +) +""" + +CREATE_FINDINGS = """ +CREATE TABLE IF NOT EXISTS vendor_findings ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL DEFAULT 'default', + vendor_id TEXT NOT NULL DEFAULT '', + contract_id TEXT, + finding_type TEXT DEFAULT 'UNKNOWN', + category TEXT DEFAULT '', + severity TEXT DEFAULT 'MEDIUM', + title TEXT DEFAULT '', + description TEXT DEFAULT '', + recommendation TEXT DEFAULT '', + citations TEXT DEFAULT '[]', + status TEXT DEFAULT 'OPEN', + assignee TEXT DEFAULT '', + due_date TIMESTAMP, + resolution TEXT DEFAULT '', + resolved_at TIMESTAMP, + resolved_by TEXT DEFAULT '', + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + created_by TEXT DEFAULT 'system' +) +""" + +CREATE_CONTROL_INSTANCES = """ +CREATE TABLE IF NOT EXISTS vendor_control_instances ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL DEFAULT 'default', + vendor_id TEXT NOT NULL DEFAULT '', + control_id TEXT DEFAULT '', + control_domain TEXT DEFAULT '', + status TEXT DEFAULT 'PLANNED', + evidence_ids TEXT DEFAULT '[]', + notes TEXT DEFAULT '', + last_assessed_at TIMESTAMP, + last_assessed_by TEXT DEFAULT '', + next_assessment_date TIMESTAMP, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + created_by TEXT DEFAULT 'system' +) +""" + +CREATE_CONTROLS = """ +CREATE TABLE IF NOT EXISTS vendor_compliance_controls ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL DEFAULT 'default', + domain TEXT DEFAULT '', + control_code TEXT DEFAULT '', + title TEXT DEFAULT '', + description TEXT DEFAULT '', + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +) +""" + + +def _setup_tables(): + with engine.connect() as conn: + for sql in [CREATE_VENDORS, CREATE_CONTRACTS, CREATE_FINDINGS, + CREATE_CONTROL_INSTANCES, CREATE_CONTROLS]: + conn.execute(text(sql)) + conn.commit() + + +def _teardown_tables(): + 
with engine.connect() as conn: + for t in ["vendor_vendors", "vendor_contracts", "vendor_findings", + "vendor_control_instances", "vendor_compliance_controls"]: + conn.execute(text(f"DELETE FROM {t}")) + conn.commit() + + +_setup_tables() + + +# ============================================================================= +# Fixtures +# ============================================================================= + +@pytest.fixture(autouse=True) +def clean_tables(): + _teardown_tables() + yield + _teardown_tables() + + +def _create_vendor(**kwargs): + payload = { + "name": kwargs.get("name", "Test Vendor GmbH"), + "country": "DE", + "role": "PROCESSOR", + "serviceCategory": "HOSTING", + "status": kwargs.get("status", "ACTIVE"), + "inherentRiskScore": kwargs.get("inherentRiskScore", 50), + } + payload.update(kwargs) + resp = client.post("/api/compliance/vendor-compliance/vendors", json=payload) + assert resp.status_code == 201 + return resp.json()["data"] + + +def _create_contract(vendor_id, **kwargs): + payload = { + "vendorId": vendor_id, + "documentType": "AVV", + "fileName": "avv-test.pdf", + "status": "DRAFT", + } + payload.update(kwargs) + resp = client.post("/api/compliance/vendor-compliance/contracts", json=payload) + assert resp.status_code == 201 + return resp.json()["data"] + + +def _create_finding(vendor_id, **kwargs): + payload = { + "vendorId": vendor_id, + "findingType": "GAP", + "severity": "HIGH", + "title": "Missing TOM Annex", + "status": "OPEN", + } + payload.update(kwargs) + resp = client.post("/api/compliance/vendor-compliance/findings", json=payload) + assert resp.status_code == 201 + return resp.json()["data"] + + +def _create_control_instance(vendor_id, **kwargs): + payload = { + "vendorId": vendor_id, + "controlId": "C-001", + "controlDomain": "priv", + "status": "PASS", + } + payload.update(kwargs) + resp = client.post("/api/compliance/vendor-compliance/control-instances", json=payload) + assert resp.status_code == 201 + return 
resp.json()["data"] + + +# ============================================================================= +# Response Format Tests +# ============================================================================= + +class TestResponseFormat: + def test_list_vendors_has_success_data_timestamp(self): + resp = client.get("/api/compliance/vendor-compliance/vendors") + assert resp.status_code == 200 + body = resp.json() + assert body["success"] is True + assert "data" in body + assert "timestamp" in body + + def test_create_vendor_has_success_data_timestamp(self): + resp = client.post("/api/compliance/vendor-compliance/vendors", json={"name": "Test"}) + assert resp.status_code == 201 + body = resp.json() + assert body["success"] is True + assert "data" in body + assert body["data"]["name"] == "Test" + assert "timestamp" in body + + +# ============================================================================= +# camelCase / snake_case Round-Trip Tests +# ============================================================================= + +class TestCamelSnakeConversion: + def test_create_with_camel_returns_camel(self): + vendor = _create_vendor( + name="CamelTest", + legalForm="GmbH", + serviceDescription="Cloud hosting", + dataAccessLevel="CONTENT", + inherentRiskScore=80, + ) + assert vendor["legalForm"] == "GmbH" + assert vendor["serviceDescription"] == "Cloud hosting" + assert vendor["dataAccessLevel"] == "CONTENT" + assert vendor["inherentRiskScore"] == 80 + + def test_round_trip_preserves_values(self): + vendor = _create_vendor( + name="RoundTrip", + processingLocations=["DE", "US"], + primaryContact={"name": "Max", "email": "max@test.de"}, + ) + vid = vendor["id"] + resp = client.get(f"/api/compliance/vendor-compliance/vendors/{vid}") + assert resp.status_code == 200 + fetched = resp.json()["data"] + assert fetched["processingLocations"] == ["DE", "US"] + assert fetched["primaryContact"]["name"] == "Max" + + +# 
============================================================================= +# Vendor Tests +# ============================================================================= + +class TestVendorsCRUD: + def test_list_empty(self): + resp = client.get("/api/compliance/vendor-compliance/vendors") + assert resp.status_code == 200 + data = resp.json()["data"] + assert data["items"] == [] + assert data["total"] == 0 + + def test_create_vendor(self): + vendor = _create_vendor(name="Hetzner GmbH") + assert vendor["name"] == "Hetzner GmbH" + assert "id" in vendor + + def test_get_vendor(self): + vendor = _create_vendor() + resp = client.get(f"/api/compliance/vendor-compliance/vendors/{vendor['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["id"] == vendor["id"] + + def test_update_vendor(self): + vendor = _create_vendor() + resp = client.put( + f"/api/compliance/vendor-compliance/vendors/{vendor['id']}", + json={"name": "Updated Name", "country": "AT"} + ) + assert resp.status_code == 200 + updated = resp.json()["data"] + assert updated["name"] == "Updated Name" + assert updated["country"] == "AT" + + def test_delete_vendor(self): + vendor = _create_vendor() + resp = client.delete(f"/api/compliance/vendor-compliance/vendors/{vendor['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["deleted"] is True + resp2 = client.get(f"/api/compliance/vendor-compliance/vendors/{vendor['id']}") + assert resp2.status_code == 404 + + def test_get_nonexistent_vendor_404(self): + resp = client.get(f"/api/compliance/vendor-compliance/vendors/{uuid.uuid4()}") + assert resp.status_code == 404 + + def test_delete_nonexistent_vendor_404(self): + resp = client.delete(f"/api/compliance/vendor-compliance/vendors/{uuid.uuid4()}") + assert resp.status_code == 404 + + +class TestVendorStats: + def test_stats_empty(self): + resp = client.get("/api/compliance/vendor-compliance/vendors/stats") + assert resp.status_code == 200 + stats = resp.json()["data"] + assert 
stats["total"] == 0 + + def test_stats_with_vendors(self): + _create_vendor(name="V1", status="ACTIVE", inherentRiskScore=80) + _create_vendor(name="V2", status="INACTIVE", inherentRiskScore=30) + _create_vendor(name="V3", status="PENDING_REVIEW", inherentRiskScore=90) + resp = client.get("/api/compliance/vendor-compliance/vendors/stats") + stats = resp.json()["data"] + assert stats["total"] == 3 + assert stats["active"] == 1 + assert stats["inactive"] == 1 + assert stats["pendingReview"] == 1 + assert stats["highRiskCount"] == 2 # 80 and 90 + + +class TestVendorStatusPatch: + def test_patch_status(self): + vendor = _create_vendor(status="ACTIVE") + resp = client.patch( + f"/api/compliance/vendor-compliance/vendors/{vendor['id']}/status", + json={"status": "TERMINATED"} + ) + assert resp.status_code == 200 + assert resp.json()["data"]["status"] == "TERMINATED" + + def test_patch_invalid_status_400(self): + vendor = _create_vendor() + resp = client.patch( + f"/api/compliance/vendor-compliance/vendors/{vendor['id']}/status", + json={"status": "INVALID"} + ) + assert resp.status_code == 400 + + +class TestVendorFilter: + def test_filter_by_status(self): + _create_vendor(name="Active1", status="ACTIVE") + _create_vendor(name="Inactive1", status="INACTIVE") + resp = client.get("/api/compliance/vendor-compliance/vendors?status=ACTIVE") + items = resp.json()["data"]["items"] + assert len(items) == 1 + assert items[0]["name"] == "Active1" + + def test_filter_by_search(self): + _create_vendor(name="Hetzner Online GmbH") + _create_vendor(name="AWS Deutschland") + resp = client.get("/api/compliance/vendor-compliance/vendors?search=Hetzner") + items = resp.json()["data"]["items"] + assert len(items) == 1 + assert "Hetzner" in items[0]["name"] + + +# ============================================================================= +# Contract Tests +# ============================================================================= + +class TestContractsCRUD: + def 
test_list_contracts_empty(self): + resp = client.get("/api/compliance/vendor-compliance/contracts") + assert resp.status_code == 200 + assert resp.json()["data"] == [] + + def test_create_contract(self): + vendor = _create_vendor() + contract = _create_contract(vendor["id"]) + assert contract["vendorId"] == vendor["id"] + assert contract["documentType"] == "AVV" + + def test_get_contract(self): + vendor = _create_vendor() + contract = _create_contract(vendor["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/contracts/{contract['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["id"] == contract["id"] + + def test_update_contract(self): + vendor = _create_vendor() + contract = _create_contract(vendor["id"]) + resp = client.put( + f"/api/compliance/vendor-compliance/contracts/{contract['id']}", + json={"status": "ACTIVE", "complianceScore": 85} + ) + assert resp.status_code == 200 + updated = resp.json()["data"] + assert updated["status"] == "ACTIVE" + assert updated["complianceScore"] == 85 + + def test_delete_contract(self): + vendor = _create_vendor() + contract = _create_contract(vendor["id"]) + resp = client.delete(f"/api/compliance/vendor-compliance/contracts/{contract['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["deleted"] is True + + +class TestContractFilter: + def test_filter_by_vendor_id(self): + v1 = _create_vendor(name="V1") + v2 = _create_vendor(name="V2") + _create_contract(v1["id"]) + _create_contract(v1["id"]) + _create_contract(v2["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/contracts?vendor_id={v1['id']}") + assert len(resp.json()["data"]) == 2 + + +# ============================================================================= +# Finding Tests +# ============================================================================= + +class TestFindingsCRUD: + def test_list_findings_empty(self): + resp = client.get("/api/compliance/vendor-compliance/findings") + assert 
resp.status_code == 200 + assert resp.json()["data"] == [] + + def test_create_finding(self): + vendor = _create_vendor() + finding = _create_finding(vendor["id"]) + assert finding["vendorId"] == vendor["id"] + assert finding["severity"] == "HIGH" + + def test_get_finding(self): + vendor = _create_vendor() + finding = _create_finding(vendor["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/findings/{finding['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["title"] == "Missing TOM Annex" + + def test_update_finding(self): + vendor = _create_vendor() + finding = _create_finding(vendor["id"]) + resp = client.put( + f"/api/compliance/vendor-compliance/findings/{finding['id']}", + json={"status": "RESOLVED", "resolution": "TOM annex added"} + ) + assert resp.status_code == 200 + updated = resp.json()["data"] + assert updated["status"] == "RESOLVED" + assert updated["resolution"] == "TOM annex added" + + def test_delete_finding(self): + vendor = _create_vendor() + finding = _create_finding(vendor["id"]) + resp = client.delete(f"/api/compliance/vendor-compliance/findings/{finding['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["deleted"] is True + + +class TestFindingFilter: + def test_filter_by_severity(self): + vendor = _create_vendor() + _create_finding(vendor["id"], severity="HIGH") + _create_finding(vendor["id"], severity="LOW") + resp = client.get("/api/compliance/vendor-compliance/findings?severity=HIGH") + assert len(resp.json()["data"]) == 1 + + def test_filter_by_vendor_id(self): + v1 = _create_vendor(name="V1") + v2 = _create_vendor(name="V2") + _create_finding(v1["id"]) + _create_finding(v2["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/findings?vendor_id={v1['id']}") + assert len(resp.json()["data"]) == 1 + + +# ============================================================================= +# Control Instance Tests +# 
============================================================================= + +class TestControlInstancesCRUD: + def test_list_control_instances_empty(self): + resp = client.get("/api/compliance/vendor-compliance/control-instances") + assert resp.status_code == 200 + assert resp.json()["data"] == [] + + def test_create_control_instance(self): + vendor = _create_vendor() + ci = _create_control_instance(vendor["id"]) + assert ci["vendorId"] == vendor["id"] + assert ci["controlId"] == "C-001" + assert ci["status"] == "PASS" + + def test_get_control_instance(self): + vendor = _create_vendor() + ci = _create_control_instance(vendor["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/control-instances/{ci['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["controlDomain"] == "priv" + + def test_update_control_instance(self): + vendor = _create_vendor() + ci = _create_control_instance(vendor["id"]) + resp = client.put( + f"/api/compliance/vendor-compliance/control-instances/{ci['id']}", + json={"status": "FAIL", "notes": "Needs remediation"} + ) + assert resp.status_code == 200 + updated = resp.json()["data"] + assert updated["status"] == "FAIL" + assert updated["notes"] == "Needs remediation" + + def test_delete_control_instance(self): + vendor = _create_vendor() + ci = _create_control_instance(vendor["id"]) + resp = client.delete(f"/api/compliance/vendor-compliance/control-instances/{ci['id']}") + assert resp.status_code == 200 + assert resp.json()["data"]["deleted"] is True + + +class TestControlInstanceFilter: + def test_filter_by_vendor_id(self): + v1 = _create_vendor(name="V1") + v2 = _create_vendor(name="V2") + _create_control_instance(v1["id"]) + _create_control_instance(v2["id"]) + resp = client.get(f"/api/compliance/vendor-compliance/control-instances?vendor_id={v1['id']}") + assert len(resp.json()["data"]) == 1 + + +# ============================================================================= +# Controls Library Tests +# 
============================================================================= + +class TestControlsLibrary: + def test_list_controls_empty(self): + resp = client.get("/api/compliance/vendor-compliance/controls") + assert resp.status_code == 200 + assert resp.json()["data"] == [] + + def test_create_control(self): + resp = client.post("/api/compliance/vendor-compliance/controls", json={ + "domain": "priv", + "controlCode": "PRIV-001", + "title": "Datenschutz-Folgenabschaetzung", + "description": "Art. 35 DSGVO Compliance" + }) + assert resp.status_code == 201 + ctrl = resp.json()["data"] + assert ctrl["domain"] == "priv" + assert ctrl["controlCode"] == "PRIV-001" + + def test_delete_control(self): + resp = client.post("/api/compliance/vendor-compliance/controls", json={ + "domain": "iam", "controlCode": "IAM-001", "title": "Access Control" + }) + ctrl_id = resp.json()["data"]["id"] + resp2 = client.delete(f"/api/compliance/vendor-compliance/controls/{ctrl_id}") + assert resp2.status_code == 200 + assert resp2.json()["data"]["deleted"] is True + + +# ============================================================================= +# Export Stub Tests +# ============================================================================= + +class TestExportStubs: + def test_post_export_501(self): + resp = client.post("/api/compliance/vendor-compliance/export", json={}) + assert resp.status_code == 501 + assert resp.json()["success"] is False + + def test_get_export_501(self): + resp = client.get(f"/api/compliance/vendor-compliance/export/{uuid.uuid4()}") + assert resp.status_code == 501 + + def test_download_export_501(self): + resp = client.get(f"/api/compliance/vendor-compliance/export/{uuid.uuid4()}/download") + assert resp.status_code == 501 diff --git a/docs-src/development/ci-cd-pipeline.md b/docs-src/development/ci-cd-pipeline.md index b6d991e..c8256be 100644 --- a/docs-src/development/ci-cd-pipeline.md +++ b/docs-src/development/ci-cd-pipeline.md @@ -48,8 +48,8 @@ │ ├── 
docs (Port 8009) │ │ ├── postgres │ │ ├── valkey (Redis) │ -│ ├── qdrant │ -│ └── minio │ +│ ├── qdrant (extern: qdrant-dev.breakpilot.ai) │ +│ └── object-storage (extern: nbg1.your-objectstorage.com) │ │ │ └─────────────────────────────────────────────────────────────────┘ ``` diff --git a/docs-src/index.md b/docs-src/index.md index 0c7f28c..8643d4b 100644 --- a/docs-src/index.md +++ b/docs-src/index.md @@ -139,8 +139,8 @@ Compliance-Services nutzen folgende Core-Infrastruktur: | PostgreSQL (5432) | Alle | Zentrale Datenbank | | Valkey (6379) | Backend, Admin | Session Cache | | Vault (8200) | Alle | Secrets Management | -| Qdrant (6333) | AI SDK, Document Crawler | Vector-Suche | -| MinIO (9000) | Document Crawler | Datei-Storage | +| Qdrant (qdrant-dev.breakpilot.ai) | AI SDK, Document Crawler | Vector-Suche (gehostet, API-Key) | +| Hetzner Object Storage | TTS Service, Document Crawler | Datei-Storage (S3-kompatibel) | | Embedding (8087) | AI SDK | Text-Embeddings | | RAG Service (8097) | AI SDK | Retrieval Augmented Generation | | Nginx | Alle | HTTPS Reverse Proxy | diff --git a/docs-src/services/ai-compliance-sdk/ARCHITECTURE.md b/docs-src/services/ai-compliance-sdk/ARCHITECTURE.md index 4b5cef2..dcaec78 100644 --- a/docs-src/services/ai-compliance-sdk/ARCHITECTURE.md +++ b/docs-src/services/ai-compliance-sdk/ARCHITECTURE.md @@ -560,10 +560,10 @@ ai-compliance-sdk: environment: - DATABASE_URL=postgres://... 
- OLLAMA_URL=http://ollama:11434 - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=https://qdrant-dev.breakpilot.ai + - QDRANT_API_KEY=${QDRANT_API_KEY} depends_on: - postgres - - qdrant ``` ### 9.2 Abhängigkeiten diff --git a/docs-src/services/sdk-modules/training.md b/docs-src/services/sdk-modules/training.md index d5ed72b..b150a9b 100644 --- a/docs-src/services/sdk-modules/training.md +++ b/docs-src/services/sdk-modules/training.md @@ -128,7 +128,7 @@ KI-generierte Inhalte werden via `compliance-tts-service` (Port 8095) in Audio u - **Audio:** Piper TTS → MP3 (Modell: `de_DE-thorsten-high.onnx`) - **Video:** FFmpeg → MP4 (Skript + Stimme + Untertitel) -- **Storage:** MinIO (`bp-core-minio:9000`) +- **Storage:** Hetzner Object Storage (`nbg1.your-objectstorage.com`, S3-kompatibel) ``` AudioPlayer → /sdk/v1/training/modules/:id/media (audio)