docs: Qdrant und MinIO/Object-Storage Referenzen aktualisieren
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 35s
CI / test-python-backend-compliance (push) Successful in 32s
CI / test-python-document-crawler (push) Successful in 41s
CI / test-python-dsms-gateway (push) Successful in 19s
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 35s
CI / test-python-backend-compliance (push) Successful in 32s
CI / test-python-document-crawler (push) Successful in 41s
CI / test-python-dsms-gateway (push) Successful in 19s
- Qdrant: lokaler Container → qdrant-dev.breakpilot.ai (gehostet, API-Key) - MinIO: bp-core-minio → Hetzner Object Storage (nbg1.your-objectstorage.com) - CLAUDE.md, MkDocs, ARCHITECTURE.md, training.md, ci-cd-pipeline.md Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -109,7 +109,7 @@ Pruefen: `curl -sf http://macmini:8099/health`
|
||||
|
||||
### compliance-tts-service
|
||||
- Piper TTS + FFmpeg fuer Schulungsvideos
|
||||
- Speichert Audio/Video in MinIO (bp-core-minio:9000)
|
||||
- Speichert Audio/Video in Hetzner Object Storage (nbg1.your-objectstorage.com)
|
||||
- TTS-Modell: `de_DE-thorsten-high.onnx`
|
||||
- Dateien: `main.py`, `tts_engine.py`, `video_generator.py`, `storage.py`
|
||||
|
||||
|
||||
@@ -0,0 +1,122 @@
|
||||
/**
|
||||
* Vendor Compliance API Proxy - Catch-all route
|
||||
* Proxies all /api/sdk/v1/vendor-compliance/* requests to backend-compliance
|
||||
*
|
||||
* Backend routes: vendors, contracts, findings, control-instances, controls, export
|
||||
* All under /api/compliance/vendor-compliance/ prefix on backend-compliance:8002
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
const BACKEND_URL = process.env.BACKEND_URL || 'http://backend-compliance:8002'
|
||||
|
||||
async function proxyRequest(
|
||||
request: NextRequest,
|
||||
pathSegments: string[] | undefined,
|
||||
method: string
|
||||
) {
|
||||
const pathStr = pathSegments?.join('/') || ''
|
||||
const searchParams = request.nextUrl.searchParams.toString()
|
||||
const basePath = `${BACKEND_URL}/api/compliance/vendor-compliance`
|
||||
const url = pathStr
|
||||
? `${basePath}/${pathStr}${searchParams ? `?${searchParams}` : ''}`
|
||||
: `${basePath}${searchParams ? `?${searchParams}` : ''}`
|
||||
|
||||
try {
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
const headerNames = ['authorization', 'x-namespace-id', 'x-tenant-slug']
|
||||
for (const name of headerNames) {
|
||||
const value = request.headers.get(name)
|
||||
if (value) {
|
||||
headers[name] = value
|
||||
}
|
||||
}
|
||||
|
||||
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
|
||||
const clientUserId = request.headers.get('x-user-id')
|
||||
const clientTenantId = request.headers.get('x-tenant-id')
|
||||
headers['X-User-ID'] = (clientUserId && uuidRegex.test(clientUserId)) ? clientUserId : '00000000-0000-0000-0000-000000000001'
|
||||
headers['X-Tenant-ID'] = (clientTenantId && uuidRegex.test(clientTenantId)) ? clientTenantId : (process.env.DEFAULT_TENANT_ID || '9282a473-5c95-4b3a-bf78-0ecc0ec71d3e')
|
||||
|
||||
const fetchOptions: RequestInit = {
|
||||
method,
|
||||
headers,
|
||||
signal: AbortSignal.timeout(60000),
|
||||
}
|
||||
|
||||
if (method === 'POST' || method === 'PUT' || method === 'PATCH') {
|
||||
const body = await request.text()
|
||||
if (body) {
|
||||
fetchOptions.body = body
|
||||
}
|
||||
}
|
||||
|
||||
const response = await fetch(url, fetchOptions)
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
let errorJson
|
||||
try {
|
||||
errorJson = JSON.parse(errorText)
|
||||
} catch {
|
||||
errorJson = { error: errorText }
|
||||
}
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, ...errorJson },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Vendor Compliance API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Compliance Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path?: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
return proxyRequest(request, path, 'GET')
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path?: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
return proxyRequest(request, path, 'POST')
|
||||
}
|
||||
|
||||
export async function PUT(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path?: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
return proxyRequest(request, path, 'PUT')
|
||||
}
|
||||
|
||||
export async function PATCH(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path?: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
return proxyRequest(request, path, 'PATCH')
|
||||
}
|
||||
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path?: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
return proxyRequest(request, path, 'DELETE')
|
||||
}
|
||||
@@ -1,88 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { ContractDocument } from '@/lib/sdk/vendor-compliance'
|
||||
|
||||
// In-memory storage for demo purposes
|
||||
const contracts: Map<string, ContractDocument> = new Map()
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const contractList = Array.from(contracts.values())
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: contractList,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching contracts:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch contracts' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST — upload a contract document for a vendor (demo: in-memory store).
 *
 * Expects multipart form data with `file`, `vendorId` and an optional JSON
 * `metadata` field. Returns 400 when file or vendorId is missing, 201 with
 * the stored record on success. A malformed `metadata` JSON string throws in
 * JSON.parse and is reported as a generic 500 via the catch block.
 */
export async function POST(request: NextRequest) {
  try {
    // Handle multipart form data for file upload
    const formData = await request.formData()
    const file = formData.get('file') as File | null
    const vendorId = formData.get('vendorId') as string
    const metadataStr = formData.get('metadata') as string

    // Both the binary and its owning vendor are mandatory.
    if (!file || !vendorId) {
      return NextResponse.json(
        { success: false, error: 'File and vendorId are required' },
        { status: 400 }
      )
    }

    const metadata = metadataStr ? JSON.parse(metadataStr) : {}
    const id = uuidv4()

    // In production, upload file to storage (MinIO, S3, etc.)
    const storagePath = `contracts/${id}/${file.name}`

    // Map upload + metadata onto the ContractDocument shape; optional
    // metadata fields pass through as undefined when absent.
    const contract: ContractDocument = {
      id,
      tenantId: 'default',
      vendorId,
      fileName: `${id}-${file.name}`,
      originalName: file.name,
      mimeType: file.type,
      fileSize: file.size,
      storagePath,
      documentType: metadata.documentType || 'OTHER',
      version: metadata.version || '1.0',
      previousVersionId: metadata.previousVersionId,
      parties: metadata.parties,
      effectiveDate: metadata.effectiveDate ? new Date(metadata.effectiveDate) : undefined,
      expirationDate: metadata.expirationDate ? new Date(metadata.expirationDate) : undefined,
      autoRenewal: metadata.autoRenewal,
      renewalNoticePeriod: metadata.renewalNoticePeriod,
      terminationNoticePeriod: metadata.terminationNoticePeriod,
      // New uploads always start unreviewed and as drafts.
      reviewStatus: 'PENDING',
      status: 'DRAFT',
      createdAt: new Date(),
      updatedAt: new Date(),
    }

    contracts.set(id, contract)

    return NextResponse.json(
      {
        success: true,
        data: contract,
        timestamp: new Date().toISOString(),
      },
      { status: 201 }
    )
  } catch (error) {
    console.error('Error uploading contract:', error)
    return NextResponse.json(
      { success: false, error: 'Failed to upload contract' },
      { status: 500 }
    )
  }
}
|
||||
@@ -1,28 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { CONTROLS_LIBRARY } from '@/lib/sdk/vendor-compliance'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const searchParams = request.nextUrl.searchParams
|
||||
const domain = searchParams.get('domain')
|
||||
|
||||
let controls = [...CONTROLS_LIBRARY]
|
||||
|
||||
// Filter by domain if provided
|
||||
if (domain) {
|
||||
controls = controls.filter((c) => c.domain === domain)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: controls,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching controls:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch controls' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* GET /api/sdk/v1/vendor-compliance/export/[reportId]/download
|
||||
*
|
||||
* Download a generated report file.
|
||||
* In production, this would redirect to a signed MinIO/S3 URL or stream the file.
|
||||
*/
|
||||
/**
 * GET — download handler for a generated report.
 *
 * Currently serves a hand-written single-page placeholder PDF containing the
 * report ID and generation timestamp; the real implementation should stream
 * from object storage (see TODO below).
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ reportId: string }> }
) {
  const { reportId } = await params

  // TODO: Implement actual file download
  // This would typically:
  // 1. Verify report exists and user has access
  // 2. Generate signed URL for MinIO/S3
  // 3. Redirect to signed URL or stream file

  // For now, return a placeholder PDF
  // NOTE(review): the xref byte offsets below are static and will not match
  // the interpolated content exactly — acceptable for a placeholder, since
  // most PDF viewers tolerate or rebuild a broken xref table.
  const placeholderContent = `
%PDF-1.4
1 0 obj
<< /Type /Catalog /Pages 2 0 R >>
endobj
2 0 obj
<< /Type /Pages /Kids [3 0 R] /Count 1 >>
endobj
3 0 obj
<< /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R /Resources << /Font << /F1 5 0 R >> >> >>
endobj
4 0 obj
<< /Length 200 >>
stream
BT
/F1 24 Tf
100 700 Td
(Vendor Compliance Report) Tj
/F1 12 Tf
100 650 Td
(Report ID: ${reportId}) Tj
100 620 Td
(Generated: ${new Date().toISOString()}) Tj
100 580 Td
(This is a placeholder. Implement actual report generation.) Tj
ET
endstream
endobj
5 0 obj
<< /Type /Font /Subtype /Type1 /BaseFont /Helvetica >>
endobj
xref
0 6
0000000000 65535 f
0000000009 00000 n
0000000058 00000 n
0000000115 00000 n
0000000266 00000 n
0000000519 00000 n
trailer
<< /Size 6 /Root 1 0 R >>
startxref
598
%%EOF
`.trim()

  // Return as PDF
  return new NextResponse(placeholderContent, {
    headers: {
      'Content-Type': 'application/pdf',
      'Content-Disposition': `attachment; filename="Report_${reportId.slice(0, 8)}.pdf"`,
    },
  })
}
|
||||
@@ -1,44 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* GET /api/sdk/v1/vendor-compliance/export/[reportId]
|
||||
*
|
||||
* Get report metadata by ID.
|
||||
*/
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ reportId: string }> }
|
||||
) {
|
||||
const { reportId } = await params
|
||||
|
||||
// TODO: Fetch report metadata from database
|
||||
// For now, return mock data
|
||||
|
||||
return NextResponse.json({
|
||||
id: reportId,
|
||||
status: 'completed',
|
||||
filename: `Report_${reportId.slice(0, 8)}.pdf`,
|
||||
generatedAt: new Date().toISOString(),
|
||||
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), // 24h
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/sdk/v1/vendor-compliance/export/[reportId]
|
||||
*
|
||||
* Delete a generated report.
|
||||
*/
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ reportId: string }> }
|
||||
) {
|
||||
const { reportId } = await params
|
||||
|
||||
// TODO: Delete report from storage and database
|
||||
console.log('Deleting report:', reportId)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
deletedId: reportId,
|
||||
})
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
/**
|
||||
* POST /api/sdk/v1/vendor-compliance/export
|
||||
*
|
||||
* Generate and export reports in various formats.
|
||||
* Currently returns mock data - integrate with actual report generation service.
|
||||
*/
|
||||
|
||||
/** Request payload for POST /export — what to generate and for which data. */
interface ExportConfig {
  // Which report template to render.
  reportType: 'VVT_EXPORT' | 'VENDOR_AUDIT' | 'ROPA' | 'MANAGEMENT_SUMMARY' | 'DPIA_INPUT'
  // Output file format.
  format: 'PDF' | 'DOCX' | 'XLSX' | 'JSON'
  // Data selection for the report.
  scope: {
    vendorIds: string[]
    processingActivityIds: string[]
    includeFindings: boolean
    includeControls: boolean
    includeRiskAssessment: boolean
    // Optional ISO-date window restricting the included data.
    dateRange?: {
      from: string
      to: string
    }
  }
}

// Human-readable (German) report names used to build the download filename.
const REPORT_TYPE_NAMES: Record<ExportConfig['reportType'], string> = {
  VVT_EXPORT: 'Verarbeitungsverzeichnis',
  VENDOR_AUDIT: 'Vendor-Audit-Pack',
  ROPA: 'RoPA',
  MANAGEMENT_SUMMARY: 'Management-Summary',
  DPIA_INPUT: 'DSFA-Input',
}

// File extension per output format.
const FORMAT_EXTENSIONS: Record<ExportConfig['format'], string> = {
  PDF: 'pdf',
  DOCX: 'docx',
  XLSX: 'xlsx',
  JSON: 'json',
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const config = (await request.json()) as ExportConfig
|
||||
|
||||
// Validate request
|
||||
if (!config.reportType || !config.format) {
|
||||
return NextResponse.json(
|
||||
{ error: 'reportType and format are required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Generate report ID and filename
|
||||
const reportId = uuidv4()
|
||||
const timestamp = new Date().toISOString().slice(0, 10).replace(/-/g, '')
|
||||
const filename = `${REPORT_TYPE_NAMES[config.reportType]}_${timestamp}.${FORMAT_EXTENSIONS[config.format]}`
|
||||
|
||||
// TODO: Implement actual report generation
|
||||
// This would typically:
|
||||
// 1. Fetch data from database based on scope
|
||||
// 2. Generate report using template engine (e.g., docx-templates, pdfkit)
|
||||
// 3. Store in MinIO/S3
|
||||
// 4. Return download URL
|
||||
|
||||
// Mock implementation - simulate processing time
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
|
||||
// In production, this would be a signed URL to MinIO/S3
|
||||
const downloadUrl = `/api/sdk/v1/vendor-compliance/export/${reportId}/download`
|
||||
|
||||
// Log export for audit trail
|
||||
console.log('Export generated:', {
|
||||
reportId,
|
||||
reportType: config.reportType,
|
||||
format: config.format,
|
||||
scope: config.scope,
|
||||
filename,
|
||||
generatedAt: new Date().toISOString(),
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
id: reportId,
|
||||
reportType: config.reportType,
|
||||
format: config.format,
|
||||
filename,
|
||||
downloadUrl,
|
||||
generatedAt: new Date().toISOString(),
|
||||
scope: {
|
||||
vendorCount: config.scope.vendorIds?.length || 0,
|
||||
activityCount: config.scope.processingActivityIds?.length || 0,
|
||||
includesFindings: config.scope.includeFindings,
|
||||
includesControls: config.scope.includeControls,
|
||||
includesRiskAssessment: config.scope.includeRiskAssessment,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Export error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to generate export' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/sdk/v1/vendor-compliance/export
|
||||
*
|
||||
* List recent exports for the current tenant.
|
||||
*/
|
||||
export async function GET() {
|
||||
// TODO: Implement fetching recent exports from database
|
||||
// For now, return empty list
|
||||
return NextResponse.json({
|
||||
exports: [],
|
||||
totalCount: 0,
|
||||
})
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { Finding } from '@/lib/sdk/vendor-compliance'
|
||||
|
||||
// In-memory storage for demo purposes
|
||||
const findings: Map<string, Finding> = new Map()
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const searchParams = request.nextUrl.searchParams
|
||||
const vendorId = searchParams.get('vendorId')
|
||||
const contractId = searchParams.get('contractId')
|
||||
const status = searchParams.get('status')
|
||||
|
||||
let findingsList = Array.from(findings.values())
|
||||
|
||||
// Filter by vendor
|
||||
if (vendorId) {
|
||||
findingsList = findingsList.filter((f) => f.vendorId === vendorId)
|
||||
}
|
||||
|
||||
// Filter by contract
|
||||
if (contractId) {
|
||||
findingsList = findingsList.filter((f) => f.contractId === contractId)
|
||||
}
|
||||
|
||||
// Filter by status
|
||||
if (status) {
|
||||
findingsList = findingsList.filter((f) => f.status === status)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: findingsList,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching findings:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch findings' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
// This would reference the same storage as the main route
|
||||
// In production, this would be database calls
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
try {
|
||||
const { id } = await params
|
||||
|
||||
// In production, fetch from database
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: null, // Would return the activity
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching processing activity:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch processing activity' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function PUT(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
try {
|
||||
const { id } = await params
|
||||
const body = await request.json()
|
||||
|
||||
// In production, update in database
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: { id, ...body, updatedAt: new Date() },
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error updating processing activity:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to update processing activity' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
try {
|
||||
const { id } = await params
|
||||
|
||||
// In production, delete from database
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error deleting processing activity:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to delete processing activity' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { ProcessingActivity, generateVVTId } from '@/lib/sdk/vendor-compliance'
|
||||
|
||||
// In-memory storage for demo purposes
|
||||
// In production, this would be replaced with database calls
|
||||
const processingActivities: Map<string, ProcessingActivity> = new Map()
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const activities = Array.from(processingActivities.values())
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: activities,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching processing activities:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch processing activities' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST — create a processing activity (demo: in-memory store).
 *
 * Accepts a JSON body, assigns a UUID plus a sequential VVT ID (unless one
 * is supplied), fills sensible defaults for absent fields, stores the record
 * and returns it with HTTP 201. No field validation is performed — invalid
 * bodies only fail if request.json() throws (reported as 500).
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json()

    // Generate IDs
    const id = uuidv4()
    // VVT IDs are derived from the already-used ones so they stay unique.
    const existingIds = Array.from(processingActivities.values()).map((a) => a.vvtId)
    const vvtId = body.vvtId || generateVVTId(existingIds)

    // Map the payload onto the ProcessingActivity shape, defaulting list
    // fields to [] and status/level fields to conservative values.
    const activity: ProcessingActivity = {
      id,
      tenantId: 'default', // Would come from auth context
      vvtId,
      name: body.name,
      responsible: body.responsible,
      dpoContact: body.dpoContact,
      purposes: body.purposes || [],
      dataSubjectCategories: body.dataSubjectCategories || [],
      personalDataCategories: body.personalDataCategories || [],
      recipientCategories: body.recipientCategories || [],
      thirdCountryTransfers: body.thirdCountryTransfers || [],
      retentionPeriod: body.retentionPeriod || { description: { de: '', en: '' } },
      technicalMeasures: body.technicalMeasures || [],
      legalBasis: body.legalBasis || [],
      dataSources: body.dataSources || [],
      systems: body.systems || [],
      dataFlows: body.dataFlows || [],
      protectionLevel: body.protectionLevel || 'MEDIUM',
      dpiaRequired: body.dpiaRequired || false,
      dpiaJustification: body.dpiaJustification,
      subProcessors: body.subProcessors || [],
      legalRetentionBasis: body.legalRetentionBasis,
      status: body.status || 'DRAFT',
      owner: body.owner || '',
      lastReviewDate: body.lastReviewDate,
      nextReviewDate: body.nextReviewDate,
      createdAt: new Date(),
      updatedAt: new Date(),
    }

    processingActivities.set(id, activity)

    return NextResponse.json(
      {
        success: true,
        data: activity,
        timestamp: new Date().toISOString(),
      },
      { status: 201 }
    )
  } catch (error) {
    console.error('Error creating processing activity:', error)
    return NextResponse.json(
      { success: false, error: 'Failed to create processing activity' },
      { status: 500 }
    )
  }
}
|
||||
@@ -1,82 +0,0 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { Vendor } from '@/lib/sdk/vendor-compliance'
|
||||
|
||||
// In-memory storage for demo purposes
|
||||
const vendors: Map<string, Vendor> = new Map()
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const vendorList = Array.from(vendors.values())
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: vendorList,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Error fetching vendors:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch vendors' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST — create a vendor (demo: in-memory store).
 *
 * Accepts a JSON body, assigns a UUID, fills defaults for absent fields
 * (risk scores default to 50, review frequency to ANNUAL, status to ACTIVE)
 * and returns the stored record with HTTP 201. No field validation beyond
 * what request.json() enforces.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json()
    const id = uuidv4()

    // Map the payload onto the Vendor shape; list fields default to [].
    const vendor: Vendor = {
      id,
      tenantId: 'default',
      name: body.name,
      legalForm: body.legalForm,
      country: body.country,
      address: body.address,
      website: body.website,
      role: body.role,
      serviceDescription: body.serviceDescription,
      serviceCategory: body.serviceCategory,
      dataAccessLevel: body.dataAccessLevel || 'NONE',
      processingLocations: body.processingLocations || [],
      transferMechanisms: body.transferMechanisms || [],
      certifications: body.certifications || [],
      primaryContact: body.primaryContact,
      dpoContact: body.dpoContact,
      securityContact: body.securityContact,
      contractTypes: body.contractTypes || [],
      contracts: body.contracts || [],
      // Mid-scale default until a real risk assessment runs.
      inherentRiskScore: body.inherentRiskScore || 50,
      residualRiskScore: body.residualRiskScore || 50,
      manualRiskAdjustment: body.manualRiskAdjustment,
      riskJustification: body.riskJustification,
      reviewFrequency: body.reviewFrequency || 'ANNUAL',
      lastReviewDate: body.lastReviewDate,
      nextReviewDate: body.nextReviewDate,
      status: body.status || 'ACTIVE',
      processingActivityIds: body.processingActivityIds || [],
      notes: body.notes,
      createdAt: new Date(),
      updatedAt: new Date(),
    }

    vendors.set(id, vendor)

    return NextResponse.json(
      {
        success: true,
        data: vendor,
        timestamp: new Date().toISOString(),
      },
      { status: 201 }
    )
  } catch (error) {
    console.error('Error creating vendor:', error)
    return NextResponse.json(
      { success: false, error: 'Failed to create vendor' },
      { status: 500 }
    )
  }
}
|
||||
@@ -648,7 +648,9 @@ func main() {
|
||||
incidentRoutes.GET("/stats", incidentHandlers.GetStatistics)
|
||||
}
|
||||
|
||||
// Vendor Compliance routes - Vendor Management & AVV/DPA (DSGVO Art. 28)
|
||||
// DEPRECATED: Vendor Compliance routes — Python backend is now Source of Truth.
|
||||
// Frontend proxies to backend-compliance:8002/api/compliance/vendor-compliance/*
|
||||
// These Go routes remain registered but should not be extended.
|
||||
vendorRoutes := v1.Group("/vendors")
|
||||
{
|
||||
// Vendor CRUD
|
||||
|
||||
@@ -525,10 +525,10 @@ ai-compliance-sdk:
|
||||
environment:
|
||||
- DATABASE_URL=postgres://...
|
||||
- OLLAMA_URL=http://ollama:11434
|
||||
- QDRANT_URL=http://qdrant:6333
|
||||
- QDRANT_URL=https://qdrant-dev.breakpilot.ai
|
||||
- QDRANT_API_KEY=${QDRANT_API_KEY}
|
||||
depends_on:
|
||||
- postgres
|
||||
- qdrant
|
||||
```
|
||||
|
||||
### 9.2 Abhängigkeiten
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
// DEPRECATED: Vendor Compliance handlers are superseded by the Python backend
|
||||
// (backend-compliance/compliance/api/vendor_compliance_routes.py).
|
||||
// Frontend now routes through /api/sdk/v1/vendor-compliance → backend-compliance:8002.
|
||||
// These Go handlers remain for backward compatibility but should not be extended.
|
||||
|
||||
package handlers
|
||||
|
||||
import (
|
||||
|
||||
@@ -27,6 +27,7 @@ from .email_template_routes import router as email_template_router
|
||||
from .banner_routes import router as banner_router
|
||||
from .extraction_routes import router as extraction_router
|
||||
from .tom_routes import router as tom_router
|
||||
from .vendor_compliance_routes import router as vendor_compliance_router
|
||||
|
||||
# Include sub-routers
|
||||
router.include_router(audit_router)
|
||||
@@ -55,6 +56,7 @@ router.include_router(email_template_router)
|
||||
router.include_router(banner_router)
|
||||
router.include_router(extraction_router)
|
||||
router.include_router(tom_router)
|
||||
router.include_router(vendor_compliance_router)
|
||||
|
||||
__all__ = [
|
||||
"router",
|
||||
@@ -83,4 +85,5 @@ __all__ = [
|
||||
"email_template_router",
|
||||
"banner_router",
|
||||
"tom_router",
|
||||
"vendor_compliance_router",
|
||||
]
|
||||
|
||||
1107
backend-compliance/compliance/api/vendor_compliance_routes.py
Normal file
1107
backend-compliance/compliance/api/vendor_compliance_routes.py
Normal file
File diff suppressed because it is too large
Load Diff
724
backend-compliance/tests/test_vendor_compliance_routes.py
Normal file
724
backend-compliance/tests/test_vendor_compliance_routes.py
Normal file
@@ -0,0 +1,724 @@
|
||||
"""Tests for Vendor Compliance routes (vendor_compliance_routes.py).
|
||||
|
||||
Includes:
|
||||
- Vendors: CRUD (5) + Stats (1) + Status-Patch (1) + Filter (2)
|
||||
- Contracts: CRUD (5) + Filter (1)
|
||||
- Findings: CRUD (5) + Filter (2)
|
||||
- Control Instances: CRUD (5) + Filter (1)
|
||||
- Controls Library: List + Create + Delete (3)
|
||||
- Export Stubs: 3 × 501
|
||||
- Response-Format: success/data/timestamp wrapper (2)
|
||||
- camelCase/snake_case round-trip (2)
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import uuid
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from sqlalchemy import create_engine, text, event
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from classroom_engine.database import get_db
|
||||
from compliance.api.vendor_compliance_routes import router as vendor_compliance_router
|
||||
|
||||
# =============================================================================
|
||||
# Test App + SQLite Setup
|
||||
# =============================================================================
|
||||
|
||||
SQLALCHEMY_DATABASE_URL = "sqlite:///./test_vendor_compliance.db"
|
||||
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
|
||||
_RawSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
TENANT_ID = "default"
|
||||
|
||||
|
||||
@event.listens_for(engine, "connect")
def _register_sqlite_functions(dbapi_conn, connection_record):
    # SQLite has no NOW() function; register a 0-arg substitute on every new
    # connection so Postgres-flavoured SQL using NOW() runs in the test DB.
    dbapi_conn.create_function("NOW", 0, lambda: datetime.utcnow().isoformat())
|
||||
|
||||
|
||||
class _DictRow(dict):
    # Marker subclass of dict: rows handed to the routes are keyed by column
    # name, mirroring the mapping-style rows the Postgres code path produces.
    pass
|
||||
|
||||
|
||||
class _DictSession:
    """Session wrapper that rewrites Postgres-specific SQL for SQLite and
    returns dict-style results.

    Rewrites applied to textual statements:
      - ``CAST(:p AS jsonb)``       -> ``:p`` (SQLite has no jsonb)
      - ``COUNT(*) FILTER (WHERE c)`` -> ``SUM(CASE WHEN c THEN 1 ELSE 0 END)``
      - ``ILIKE``                   -> ``LIKE``

    NOTE(review): the FILTER regex uses ``[^)]+`` and therefore breaks on
    conditions containing parentheses — acceptable for the current queries,
    but worth confirming if new FILTER clauses are added.
    """

    def __init__(self, session):
        # The real SQLAlchemy session everything is delegated to.
        self._session = session

    def execute(self, stmt, params=None):
        # Rewrite only textual statements (those with a .text attribute).
        import re
        if hasattr(stmt, 'text'):
            rewritten = re.sub(r'CAST\((:[\w]+)\s+AS\s+jsonb\)', r'\1', stmt.text)
            # Remove FILTER (WHERE ...) for SQLite — replace with CASE/SUM
            # Simple approach: rewrite COUNT(*) FILTER (WHERE cond) → SUM(CASE WHEN cond THEN 1 ELSE 0 END)
            filter_re = r'COUNT\(\*\)\s+FILTER\s*\(\s*WHERE\s+([^)]+)\)'
            rewritten = re.sub(filter_re, r'SUM(CASE WHEN \1 THEN 1 ELSE 0 END)', rewritten)
            # ILIKE → LIKE for SQLite
            rewritten = rewritten.replace(' ILIKE ', ' LIKE ')
            if rewritten != stmt.text:
                stmt = text(rewritten)
        result = self._session.execute(stmt, params)
        return _DictResult(result)

    # Plain pass-throughs to the underlying session.
    def flush(self):
        self._session.flush()

    def commit(self):
        self._session.commit()

    def rollback(self):
        self._session.rollback()

    def close(self):
        self._session.close()
|
||||
|
||||
|
||||
class _DictResult:
|
||||
def __init__(self, result):
|
||||
self._result = result
|
||||
try:
|
||||
self._keys = list(result.keys())
|
||||
self._returns_rows = True
|
||||
except Exception:
|
||||
self._keys = []
|
||||
self._returns_rows = False
|
||||
|
||||
def fetchone(self):
|
||||
if not self._returns_rows:
|
||||
return None
|
||||
row = self._result.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
return _DictRow(zip(self._keys, row))
|
||||
|
||||
def fetchall(self):
|
||||
if not self._returns_rows:
|
||||
return []
|
||||
rows = self._result.fetchall()
|
||||
return [_DictRow(zip(self._keys, r)) for r in rows]
|
||||
|
||||
@property
|
||||
def rowcount(self):
|
||||
return self._result.rowcount
|
||||
|
||||
|
||||
# Minimal FastAPI app mounting only the router under test, matching the
# production prefix so the route paths are identical.
app = FastAPI()
app.include_router(vendor_compliance_router, prefix="/api/compliance")


def override_get_db():
    # Dependency override: hand the routes a _DictSession wrapping a raw
    # SQLite session, so SQL written for Postgres/dict-rows keeps working.
    session = _RawSessionLocal()
    db = _DictSession(session)
    try:
        yield db
    finally:
        # Always release the underlying connection, even on request failure.
        db.close()


app.dependency_overrides[get_db] = override_get_db
client = TestClient(app)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SQLite Table Creation
|
||||
# =============================================================================
|
||||
|
||||
# DDL for the SQLite test schema. TEXT primary keys hold UUID strings and
# JSON payloads are stored as TEXT columns with string defaults, loosely
# mirroring the Postgres schema used in production.

# Vendor master data: identity, contacts, risk scores, review cadence.
CREATE_VENDORS = """
CREATE TABLE IF NOT EXISTS vendor_vendors (
    id TEXT PRIMARY KEY,
    tenant_id TEXT NOT NULL DEFAULT 'default',
    name TEXT NOT NULL DEFAULT '',
    legal_form TEXT DEFAULT '',
    country TEXT DEFAULT '',
    address TEXT DEFAULT '',
    website TEXT DEFAULT '',
    role TEXT DEFAULT 'PROCESSOR',
    service_description TEXT DEFAULT '',
    service_category TEXT DEFAULT 'OTHER',
    data_access_level TEXT DEFAULT 'NONE',
    processing_locations TEXT DEFAULT '[]',
    transfer_mechanisms TEXT DEFAULT '[]',
    certifications TEXT DEFAULT '[]',
    primary_contact TEXT DEFAULT '{}',
    dpo_contact TEXT DEFAULT '{}',
    security_contact TEXT DEFAULT '{}',
    contract_types TEXT DEFAULT '[]',
    inherent_risk_score INTEGER DEFAULT 50,
    residual_risk_score INTEGER DEFAULT 50,
    manual_risk_adjustment INTEGER,
    risk_justification TEXT DEFAULT '',
    review_frequency TEXT DEFAULT 'ANNUAL',
    last_review_date TIMESTAMP,
    next_review_date TIMESTAMP,
    status TEXT DEFAULT 'ACTIVE',
    processing_activity_ids TEXT DEFAULT '[]',
    notes TEXT DEFAULT '',
    contact_name TEXT DEFAULT '',
    contact_email TEXT DEFAULT '',
    contact_phone TEXT DEFAULT '',
    contact_department TEXT DEFAULT '',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by TEXT DEFAULT 'system'
)
"""

# Uploaded contract documents (AVV etc.) with versioning and review state.
CREATE_CONTRACTS = """
CREATE TABLE IF NOT EXISTS vendor_contracts (
    id TEXT PRIMARY KEY,
    tenant_id TEXT NOT NULL DEFAULT 'default',
    vendor_id TEXT NOT NULL DEFAULT '',
    file_name TEXT DEFAULT '',
    original_name TEXT DEFAULT '',
    mime_type TEXT DEFAULT '',
    file_size INTEGER DEFAULT 0,
    storage_path TEXT DEFAULT '',
    document_type TEXT DEFAULT 'AVV',
    version INTEGER DEFAULT 1,
    previous_version_id TEXT,
    parties TEXT DEFAULT '[]',
    effective_date TIMESTAMP,
    expiration_date TIMESTAMP,
    auto_renewal INTEGER DEFAULT 0,
    renewal_notice_period TEXT DEFAULT '',
    termination_notice_period TEXT DEFAULT '',
    review_status TEXT DEFAULT 'PENDING',
    review_completed_at TIMESTAMP,
    compliance_score INTEGER,
    status TEXT DEFAULT 'DRAFT',
    extracted_text TEXT DEFAULT '',
    page_count INTEGER DEFAULT 0,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by TEXT DEFAULT 'system'
)
"""

# Review findings (gaps, issues) raised against a vendor or contract.
CREATE_FINDINGS = """
CREATE TABLE IF NOT EXISTS vendor_findings (
    id TEXT PRIMARY KEY,
    tenant_id TEXT NOT NULL DEFAULT 'default',
    vendor_id TEXT NOT NULL DEFAULT '',
    contract_id TEXT,
    finding_type TEXT DEFAULT 'UNKNOWN',
    category TEXT DEFAULT '',
    severity TEXT DEFAULT 'MEDIUM',
    title TEXT DEFAULT '',
    description TEXT DEFAULT '',
    recommendation TEXT DEFAULT '',
    citations TEXT DEFAULT '[]',
    status TEXT DEFAULT 'OPEN',
    assignee TEXT DEFAULT '',
    due_date TIMESTAMP,
    resolution TEXT DEFAULT '',
    resolved_at TIMESTAMP,
    resolved_by TEXT DEFAULT '',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by TEXT DEFAULT 'system'
)
"""

# Per-vendor instantiation of a control, with assessment status/evidence.
CREATE_CONTROL_INSTANCES = """
CREATE TABLE IF NOT EXISTS vendor_control_instances (
    id TEXT PRIMARY KEY,
    tenant_id TEXT NOT NULL DEFAULT 'default',
    vendor_id TEXT NOT NULL DEFAULT '',
    control_id TEXT DEFAULT '',
    control_domain TEXT DEFAULT '',
    status TEXT DEFAULT 'PLANNED',
    evidence_ids TEXT DEFAULT '[]',
    notes TEXT DEFAULT '',
    last_assessed_at TIMESTAMP,
    last_assessed_by TEXT DEFAULT '',
    next_assessment_date TIMESTAMP,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by TEXT DEFAULT 'system'
)
"""

# Reusable control-library entries referenced by control instances.
CREATE_CONTROLS = """
CREATE TABLE IF NOT EXISTS vendor_compliance_controls (
    id TEXT PRIMARY KEY,
    tenant_id TEXT NOT NULL DEFAULT 'default',
    domain TEXT DEFAULT '',
    control_code TEXT DEFAULT '',
    title TEXT DEFAULT '',
    description TEXT DEFAULT '',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""
|
||||
|
||||
|
||||
def _setup_tables():
    """Create all vendor-compliance tables on the test engine (idempotent)."""
    ddl_statements = [CREATE_VENDORS, CREATE_CONTRACTS, CREATE_FINDINGS,
                      CREATE_CONTROL_INSTANCES, CREATE_CONTROLS]
    with engine.connect() as conn:
        for ddl in ddl_statements:
            conn.execute(text(ddl))
        conn.commit()
|
||||
|
||||
|
||||
def _teardown_tables():
    """Empty every vendor-compliance table so each test starts clean."""
    tables = ("vendor_vendors", "vendor_contracts", "vendor_findings",
              "vendor_control_instances", "vendor_compliance_controls")
    with engine.connect() as conn:
        for t in tables:
            conn.execute(text(f"DELETE FROM {t}"))
        conn.commit()
|
||||
|
||||
|
||||
# Create the schema once at import time so the module-level TestClient can
# serve requests before any fixture runs.
_setup_tables()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Fixtures
|
||||
# =============================================================================
|
||||
|
||||
@pytest.fixture(autouse=True)
def clean_tables():
    # Wipe all tables before AND after every test for full isolation.
    _teardown_tables()
    yield
    _teardown_tables()
|
||||
|
||||
|
||||
def _create_vendor(**kwargs):
    """POST a vendor (defaults overridable via kwargs); return created data."""
    defaults = {
        "name": "Test Vendor GmbH",
        "country": "DE",
        "role": "PROCESSOR",
        "serviceCategory": "HOSTING",
        "status": "ACTIVE",
        "inherentRiskScore": 50,
    }
    payload = {**defaults, **kwargs}
    resp = client.post("/api/compliance/vendor-compliance/vendors", json=payload)
    assert resp.status_code == 201
    return resp.json()["data"]
|
||||
|
||||
|
||||
def _create_contract(vendor_id, **kwargs):
    """POST a contract for vendor_id (defaults overridable); return data."""
    payload = {
        "vendorId": vendor_id,
        "documentType": "AVV",
        "fileName": "avv-test.pdf",
        "status": "DRAFT",
        **kwargs,
    }
    resp = client.post("/api/compliance/vendor-compliance/contracts", json=payload)
    assert resp.status_code == 201
    return resp.json()["data"]
|
||||
|
||||
|
||||
def _create_finding(vendor_id, **kwargs):
    """POST a finding for vendor_id (defaults overridable); return data."""
    payload = {
        "vendorId": vendor_id,
        "findingType": "GAP",
        "severity": "HIGH",
        "title": "Missing TOM Annex",
        "status": "OPEN",
        **kwargs,
    }
    resp = client.post("/api/compliance/vendor-compliance/findings", json=payload)
    assert resp.status_code == 201
    return resp.json()["data"]
|
||||
|
||||
|
||||
def _create_control_instance(vendor_id, **kwargs):
    """POST a control instance for vendor_id (overridable); return data."""
    payload = {
        "vendorId": vendor_id,
        "controlId": "C-001",
        "controlDomain": "priv",
        "status": "PASS",
        **kwargs,
    }
    resp = client.post("/api/compliance/vendor-compliance/control-instances", json=payload)
    assert resp.status_code == 201
    return resp.json()["data"]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Response Format Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestResponseFormat:
    """Every endpoint wraps its payload as {success, data, timestamp}."""

    def test_list_vendors_has_success_data_timestamp(self):
        response = client.get("/api/compliance/vendor-compliance/vendors")
        assert response.status_code == 200
        envelope = response.json()
        assert envelope["success"] is True
        assert "data" in envelope
        assert "timestamp" in envelope

    def test_create_vendor_has_success_data_timestamp(self):
        response = client.post("/api/compliance/vendor-compliance/vendors", json={"name": "Test"})
        assert response.status_code == 201
        envelope = response.json()
        assert envelope["success"] is True
        assert "data" in envelope
        assert envelope["data"]["name"] == "Test"
        assert "timestamp" in envelope
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# camelCase / snake_case Round-Trip Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestCamelSnakeConversion:
    """camelCase request fields must survive the snake_case DB round trip."""

    def test_create_with_camel_returns_camel(self):
        created = _create_vendor(
            name="CamelTest",
            legalForm="GmbH",
            serviceDescription="Cloud hosting",
            dataAccessLevel="CONTENT",
            inherentRiskScore=80,
        )
        assert created["legalForm"] == "GmbH"
        assert created["serviceDescription"] == "Cloud hosting"
        assert created["dataAccessLevel"] == "CONTENT"
        assert created["inherentRiskScore"] == 80

    def test_round_trip_preserves_values(self):
        created = _create_vendor(
            name="RoundTrip",
            processingLocations=["DE", "US"],
            primaryContact={"name": "Max", "email": "max@test.de"},
        )
        response = client.get(f"/api/compliance/vendor-compliance/vendors/{created['id']}")
        assert response.status_code == 200
        fetched = response.json()["data"]
        assert fetched["processingLocations"] == ["DE", "US"]
        assert fetched["primaryContact"]["name"] == "Max"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Vendor Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestVendorsCRUD:
    """Full CRUD cycle plus 404 handling for the /vendors endpoints."""

    def test_list_empty(self):
        res = client.get("/api/compliance/vendor-compliance/vendors")
        assert res.status_code == 200
        listing = res.json()["data"]
        assert listing["items"] == []
        assert listing["total"] == 0

    def test_create_vendor(self):
        created = _create_vendor(name="Hetzner GmbH")
        assert created["name"] == "Hetzner GmbH"
        assert "id" in created

    def test_get_vendor(self):
        created = _create_vendor()
        res = client.get(f"/api/compliance/vendor-compliance/vendors/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["id"] == created["id"]

    def test_update_vendor(self):
        created = _create_vendor()
        url = f"/api/compliance/vendor-compliance/vendors/{created['id']}"
        res = client.put(url, json={"name": "Updated Name", "country": "AT"})
        assert res.status_code == 200
        body = res.json()["data"]
        assert body["name"] == "Updated Name"
        assert body["country"] == "AT"

    def test_delete_vendor(self):
        created = _create_vendor()
        res = client.delete(f"/api/compliance/vendor-compliance/vendors/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["deleted"] is True
        # A subsequent fetch must report the vendor as gone.
        follow_up = client.get(f"/api/compliance/vendor-compliance/vendors/{created['id']}")
        assert follow_up.status_code == 404

    def test_get_nonexistent_vendor_404(self):
        res = client.get(f"/api/compliance/vendor-compliance/vendors/{uuid.uuid4()}")
        assert res.status_code == 404

    def test_delete_nonexistent_vendor_404(self):
        res = client.delete(f"/api/compliance/vendor-compliance/vendors/{uuid.uuid4()}")
        assert res.status_code == 404
|
||||
|
||||
|
||||
class TestVendorStats:
    """Aggregate counters exposed by /vendors/stats."""

    def test_stats_empty(self):
        res = client.get("/api/compliance/vendor-compliance/vendors/stats")
        assert res.status_code == 200
        assert res.json()["data"]["total"] == 0

    def test_stats_with_vendors(self):
        _create_vendor(name="V1", status="ACTIVE", inherentRiskScore=80)
        _create_vendor(name="V2", status="INACTIVE", inherentRiskScore=30)
        _create_vendor(name="V3", status="PENDING_REVIEW", inherentRiskScore=90)
        res = client.get("/api/compliance/vendor-compliance/vendors/stats")
        stats = res.json()["data"]
        assert stats["total"] == 3
        assert stats["active"] == 1
        assert stats["inactive"] == 1
        assert stats["pendingReview"] == 1
        assert stats["highRiskCount"] == 2  # 80 and 90
|
||||
|
||||
|
||||
class TestVendorStatusPatch:
    """PATCH /vendors/{id}/status: valid transition vs. rejected value."""

    def test_patch_status(self):
        created = _create_vendor(status="ACTIVE")
        url = f"/api/compliance/vendor-compliance/vendors/{created['id']}/status"
        res = client.patch(url, json={"status": "TERMINATED"})
        assert res.status_code == 200
        assert res.json()["data"]["status"] == "TERMINATED"

    def test_patch_invalid_status_400(self):
        created = _create_vendor()
        url = f"/api/compliance/vendor-compliance/vendors/{created['id']}/status"
        res = client.patch(url, json={"status": "INVALID"})
        assert res.status_code == 400
|
||||
|
||||
|
||||
class TestVendorFilter:
    """Query-string filtering on the vendor list endpoint."""

    def test_filter_by_status(self):
        _create_vendor(name="Active1", status="ACTIVE")
        _create_vendor(name="Inactive1", status="INACTIVE")
        res = client.get("/api/compliance/vendor-compliance/vendors?status=ACTIVE")
        matches = res.json()["data"]["items"]
        assert len(matches) == 1
        assert matches[0]["name"] == "Active1"

    def test_filter_by_search(self):
        _create_vendor(name="Hetzner Online GmbH")
        _create_vendor(name="AWS Deutschland")
        res = client.get("/api/compliance/vendor-compliance/vendors?search=Hetzner")
        matches = res.json()["data"]["items"]
        assert len(matches) == 1
        assert "Hetzner" in matches[0]["name"]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Contract Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestContractsCRUD:
    """CRUD coverage for the /contracts endpoints."""

    def test_list_contracts_empty(self):
        res = client.get("/api/compliance/vendor-compliance/contracts")
        assert res.status_code == 200
        assert res.json()["data"] == []

    def test_create_contract(self):
        owner = _create_vendor()
        created = _create_contract(owner["id"])
        assert created["vendorId"] == owner["id"]
        assert created["documentType"] == "AVV"

    def test_get_contract(self):
        owner = _create_vendor()
        created = _create_contract(owner["id"])
        res = client.get(f"/api/compliance/vendor-compliance/contracts/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["id"] == created["id"]

    def test_update_contract(self):
        owner = _create_vendor()
        created = _create_contract(owner["id"])
        url = f"/api/compliance/vendor-compliance/contracts/{created['id']}"
        res = client.put(url, json={"status": "ACTIVE", "complianceScore": 85})
        assert res.status_code == 200
        body = res.json()["data"]
        assert body["status"] == "ACTIVE"
        assert body["complianceScore"] == 85

    def test_delete_contract(self):
        owner = _create_vendor()
        created = _create_contract(owner["id"])
        res = client.delete(f"/api/compliance/vendor-compliance/contracts/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["deleted"] is True
|
||||
|
||||
|
||||
class TestContractFilter:
    """Contracts can be narrowed to a single vendor via ?vendor_id=."""

    def test_filter_by_vendor_id(self):
        first = _create_vendor(name="V1")
        second = _create_vendor(name="V2")
        _create_contract(first["id"])
        _create_contract(first["id"])
        _create_contract(second["id"])
        res = client.get(f"/api/compliance/vendor-compliance/contracts?vendor_id={first['id']}")
        assert len(res.json()["data"]) == 2
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Finding Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestFindingsCRUD:
    """CRUD coverage for the /findings endpoints."""

    def test_list_findings_empty(self):
        res = client.get("/api/compliance/vendor-compliance/findings")
        assert res.status_code == 200
        assert res.json()["data"] == []

    def test_create_finding(self):
        owner = _create_vendor()
        created = _create_finding(owner["id"])
        assert created["vendorId"] == owner["id"]
        assert created["severity"] == "HIGH"

    def test_get_finding(self):
        owner = _create_vendor()
        created = _create_finding(owner["id"])
        res = client.get(f"/api/compliance/vendor-compliance/findings/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["title"] == "Missing TOM Annex"

    def test_update_finding(self):
        owner = _create_vendor()
        created = _create_finding(owner["id"])
        url = f"/api/compliance/vendor-compliance/findings/{created['id']}"
        res = client.put(url, json={"status": "RESOLVED", "resolution": "TOM annex added"})
        assert res.status_code == 200
        body = res.json()["data"]
        assert body["status"] == "RESOLVED"
        assert body["resolution"] == "TOM annex added"

    def test_delete_finding(self):
        owner = _create_vendor()
        created = _create_finding(owner["id"])
        res = client.delete(f"/api/compliance/vendor-compliance/findings/{created['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["deleted"] is True
|
||||
|
||||
|
||||
class TestFindingFilter:
    """Query filters on the findings list endpoint."""

    def test_filter_by_severity(self):
        owner = _create_vendor()
        _create_finding(owner["id"], severity="HIGH")
        _create_finding(owner["id"], severity="LOW")
        res = client.get("/api/compliance/vendor-compliance/findings?severity=HIGH")
        assert len(res.json()["data"]) == 1

    def test_filter_by_vendor_id(self):
        first = _create_vendor(name="V1")
        second = _create_vendor(name="V2")
        _create_finding(first["id"])
        _create_finding(second["id"])
        res = client.get(f"/api/compliance/vendor-compliance/findings?vendor_id={first['id']}")
        assert len(res.json()["data"]) == 1
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Control Instance Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestControlInstancesCRUD:
    """CRUD coverage for the /control-instances endpoints."""

    def test_list_control_instances_empty(self):
        res = client.get("/api/compliance/vendor-compliance/control-instances")
        assert res.status_code == 200
        assert res.json()["data"] == []

    def test_create_control_instance(self):
        owner = _create_vendor()
        instance = _create_control_instance(owner["id"])
        assert instance["vendorId"] == owner["id"]
        assert instance["controlId"] == "C-001"
        assert instance["status"] == "PASS"

    def test_get_control_instance(self):
        owner = _create_vendor()
        instance = _create_control_instance(owner["id"])
        res = client.get(f"/api/compliance/vendor-compliance/control-instances/{instance['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["controlDomain"] == "priv"

    def test_update_control_instance(self):
        owner = _create_vendor()
        instance = _create_control_instance(owner["id"])
        url = f"/api/compliance/vendor-compliance/control-instances/{instance['id']}"
        res = client.put(url, json={"status": "FAIL", "notes": "Needs remediation"})
        assert res.status_code == 200
        body = res.json()["data"]
        assert body["status"] == "FAIL"
        assert body["notes"] == "Needs remediation"

    def test_delete_control_instance(self):
        owner = _create_vendor()
        instance = _create_control_instance(owner["id"])
        res = client.delete(f"/api/compliance/vendor-compliance/control-instances/{instance['id']}")
        assert res.status_code == 200
        assert res.json()["data"]["deleted"] is True
|
||||
|
||||
|
||||
class TestControlInstanceFilter:
    """Control instances can be narrowed to one vendor via ?vendor_id=."""

    def test_filter_by_vendor_id(self):
        first = _create_vendor(name="V1")
        second = _create_vendor(name="V2")
        _create_control_instance(first["id"])
        _create_control_instance(second["id"])
        res = client.get(f"/api/compliance/vendor-compliance/control-instances?vendor_id={first['id']}")
        assert len(res.json()["data"]) == 1
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Controls Library Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestControlsLibrary:
    """Create/list/delete for the reusable controls library."""

    def test_list_controls_empty(self):
        res = client.get("/api/compliance/vendor-compliance/controls")
        assert res.status_code == 200
        assert res.json()["data"] == []

    def test_create_control(self):
        res = client.post("/api/compliance/vendor-compliance/controls", json={
            "domain": "priv",
            "controlCode": "PRIV-001",
            "title": "Datenschutz-Folgenabschaetzung",
            "description": "Art. 35 DSGVO Compliance"
        })
        assert res.status_code == 201
        created = res.json()["data"]
        assert created["domain"] == "priv"
        assert created["controlCode"] == "PRIV-001"

    def test_delete_control(self):
        create_res = client.post("/api/compliance/vendor-compliance/controls", json={
            "domain": "iam", "controlCode": "IAM-001", "title": "Access Control"
        })
        control_id = create_res.json()["data"]["id"]
        delete_res = client.delete(f"/api/compliance/vendor-compliance/controls/{control_id}")
        assert delete_res.status_code == 200
        assert delete_res.json()["data"]["deleted"] is True
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Export Stub Tests
|
||||
# =============================================================================
|
||||
|
||||
class TestExportStubs:
    """Export endpoints are intentionally unimplemented and answer 501."""

    def test_post_export_501(self):
        res = client.post("/api/compliance/vendor-compliance/export", json={})
        assert res.status_code == 501
        assert res.json()["success"] is False

    def test_get_export_501(self):
        res = client.get(f"/api/compliance/vendor-compliance/export/{uuid.uuid4()}")
        assert res.status_code == 501

    def test_download_export_501(self):
        res = client.get(f"/api/compliance/vendor-compliance/export/{uuid.uuid4()}/download")
        assert res.status_code == 501
|
||||
@@ -48,8 +48,8 @@
|
||||
│ ├── docs (Port 8009) │
|
||||
│ ├── postgres │
|
||||
│ ├── valkey (Redis) │
|
||||
│ ├── qdrant │
|
||||
│ └── minio │
|
||||
│ ├── qdrant (extern: qdrant-dev.breakpilot.ai) │
|
||||
│ └── object-storage (extern: nbg1.your-objectstorage.com) │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
@@ -139,8 +139,8 @@ Compliance-Services nutzen folgende Core-Infrastruktur:
|
||||
| PostgreSQL (5432) | Alle | Zentrale Datenbank |
|
||||
| Valkey (6379) | Backend, Admin | Session Cache |
|
||||
| Vault (8200) | Alle | Secrets Management |
|
||||
| Qdrant (6333) | AI SDK, Document Crawler | Vector-Suche |
|
||||
| MinIO (9000) | Document Crawler | Datei-Storage |
|
||||
| Qdrant (qdrant-dev.breakpilot.ai) | AI SDK, Document Crawler | Vector-Suche (gehostet, API-Key) |
|
||||
| Hetzner Object Storage | TTS Service, Document Crawler | Datei-Storage (S3-kompatibel) |
|
||||
| Embedding (8087) | AI SDK | Text-Embeddings |
|
||||
| RAG Service (8097) | AI SDK | Retrieval Augmented Generation |
|
||||
| Nginx | Alle | HTTPS Reverse Proxy |
|
||||
|
||||
@@ -560,10 +560,10 @@ ai-compliance-sdk:
|
||||
environment:
|
||||
- DATABASE_URL=postgres://...
|
||||
- OLLAMA_URL=http://ollama:11434
|
||||
- QDRANT_URL=http://qdrant:6333
|
||||
- QDRANT_URL=https://qdrant-dev.breakpilot.ai
|
||||
- QDRANT_API_KEY=${QDRANT_API_KEY}
|
||||
depends_on:
|
||||
- postgres
|
||||
- qdrant
|
||||
```
|
||||
|
||||
### 9.2 Abhängigkeiten
|
||||
|
||||
@@ -128,7 +128,7 @@ KI-generierte Inhalte werden via `compliance-tts-service` (Port 8095) in Audio u
|
||||
|
||||
- **Audio:** Piper TTS → MP3 (Modell: `de_DE-thorsten-high.onnx`)
|
||||
- **Video:** FFmpeg → MP4 (Skript + Stimme + Untertitel)
|
||||
- **Storage:** MinIO (`bp-core-minio:9000`)
|
||||
- **Storage:** Hetzner Object Storage (`nbg1.your-objectstorage.com`, S3-kompatibel)
|
||||
|
||||
```
|
||||
AudioPlayer → /sdk/v1/training/modules/:id/media (audio)
|
||||
|
||||
Reference in New Issue
Block a user