fix(admin-v2): Restore complete admin-v2 application

The admin-v2 application was incomplete in the repository. This commit
restores all missing components:

- Admin pages (76 pages): dashboard, ai, compliance, dsgvo, education,
  infrastructure, communication, development, onboarding, rbac
- SDK pages (45 pages): tom, dsfa, vvt, loeschfristen, einwilligungen,
  vendor-compliance, tom-generator, dsr, and more
- Developer portal (25 pages): API docs, SDK guides, frameworks
- All components, lib files, hooks, and types
- Updated package.json with all dependencies

The issue was caused by incomplete initial repository state - the full
admin-v2 codebase existed in backend/admin-v2 and docs-src/admin-v2
but was never fully synced to the main admin-v2 directory.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
BreakPilot Dev
2026-02-08 23:40:15 -08:00
parent f28244753f
commit 660295e218
385 changed files with 138126 additions and 3079 deletions

View File

@@ -0,0 +1,107 @@
import { NextRequest, NextResponse } from 'next/server'
import { TOMRulesEngine } from '@/lib/sdk/tom-generator/rules-engine'
import { TOMGeneratorState } from '@/lib/sdk/tom-generator/types'
/**
* TOM Generator Controls Evaluation API
*
* POST /api/sdk/v1/tom-generator/controls/evaluate - Evaluate controls for given state
*
* Request body:
* {
* state: TOMGeneratorState
* }
*
* Response:
* {
* evaluations: RulesEngineResult[]
* derivedTOMs: DerivedTOM[]
* summary: {
* total: number
* required: number
* recommended: number
* optional: number
* notApplicable: number
* }
* }
*/
export async function POST(request: NextRequest) {
try {
const body = await request.json()
const { state } = body
if (!state) {
return NextResponse.json(
{ success: false, error: 'state is required in request body' },
{ status: 400 }
)
}
// Parse dates in state
const parsedState: TOMGeneratorState = {
...state,
createdAt: new Date(state.createdAt),
updatedAt: new Date(state.updatedAt),
steps: state.steps?.map((step: { id: string; completed: boolean; data: unknown; validatedAt: string | null }) => ({
...step,
validatedAt: step.validatedAt ? new Date(step.validatedAt) : null,
})) || [],
documents: [],
derivedTOMs: [],
gapAnalysis: null,
exports: [],
}
// Initialize rules engine and evaluate
const engine = new TOMRulesEngine()
const evaluations = engine.evaluateControls(parsedState)
const derivedTOMs = engine.deriveAllTOMs(parsedState)
// Calculate summary
const summary = {
total: evaluations.length,
required: evaluations.filter((e) => e.applicability === 'REQUIRED').length,
recommended: evaluations.filter((e) => e.applicability === 'RECOMMENDED').length,
optional: evaluations.filter((e) => e.applicability === 'OPTIONAL').length,
notApplicable: evaluations.filter((e) => e.applicability === 'NOT_APPLICABLE').length,
}
// Group by category
const byCategory: Record<string, typeof evaluations> = {}
evaluations.forEach((e) => {
const category = e.controlId.split('-')[1] // Extract category from ID like TOM-AC-01
if (!byCategory[category]) {
byCategory[category] = []
}
byCategory[category].push(e)
})
return NextResponse.json({
success: true,
data: {
evaluations,
derivedTOMs,
summary,
byCategory,
},
})
} catch (error) {
console.error('Failed to evaluate controls:', error)
return NextResponse.json(
{ success: false, error: 'Failed to evaluate controls' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'POST, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,128 @@
import { NextRequest, NextResponse } from 'next/server'
import {
getAllControls,
getControlById,
getControlsByCategory,
searchControls,
getCategories,
} from '@/lib/sdk/tom-generator/controls/loader'
import { ControlCategory } from '@/lib/sdk/tom-generator/types'
/**
* TOM Generator Controls API
*
* GET /api/sdk/v1/tom-generator/controls - List all controls
* GET /api/sdk/v1/tom-generator/controls?id=xxx - Get single control
* GET /api/sdk/v1/tom-generator/controls?category=xxx - Filter by category
* GET /api/sdk/v1/tom-generator/controls?search=xxx - Search controls
* GET /api/sdk/v1/tom-generator/controls?categories=true - Get categories list
*/
export async function GET(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const id = searchParams.get('id')
const category = searchParams.get('category')
const search = searchParams.get('search')
const categoriesOnly = searchParams.get('categories')
const language = (searchParams.get('language') || 'de') as 'de' | 'en'
// Get categories list
if (categoriesOnly === 'true') {
const categories = getCategories()
return NextResponse.json({
success: true,
data: categories,
})
}
// Get single control by ID
if (id) {
const control = getControlById(id)
if (!control) {
return NextResponse.json(
{ success: false, error: `Control not found: ${id}` },
{ status: 404 }
)
}
return NextResponse.json({
success: true,
data: {
...control,
// Return localized name and description
localizedName: control.name[language],
localizedDescription: control.description[language],
},
})
}
// Filter by category
if (category) {
const controls = getControlsByCategory(category as ControlCategory)
return NextResponse.json({
success: true,
data: controls.map((c) => ({
...c,
localizedName: c.name[language],
localizedDescription: c.description[language],
})),
meta: {
category,
count: controls.length,
},
})
}
// Search controls
if (search) {
const controls = searchControls(search, language)
return NextResponse.json({
success: true,
data: controls.map((c) => ({
...c,
localizedName: c.name[language],
localizedDescription: c.description[language],
})),
meta: {
query: search,
count: controls.length,
},
})
}
// Return all controls
const controls = getAllControls()
const categories = getCategories()
return NextResponse.json({
success: true,
data: controls.map((c) => ({
...c,
localizedName: c.name[language],
localizedDescription: c.description[language],
})),
meta: {
totalControls: controls.length,
categories: categories.length,
language,
},
})
} catch (error) {
console.error('Failed to fetch controls:', error)
return NextResponse.json(
{ success: false, error: 'Failed to fetch controls' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'GET, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,121 @@
import { NextRequest, NextResponse } from 'next/server'
import { TOMDocumentAnalyzer } from '@/lib/sdk/tom-generator/ai/document-analyzer'
import { evidenceStore } from '@/lib/sdk/tom-generator/evidence-store'
/**
* TOM Generator Evidence Analysis API
*
* POST /api/sdk/v1/tom-generator/evidence/[id]/analyze - Analyze evidence document with AI
*
* Request body:
* {
* tenantId: string
* documentText?: string (if already extracted)
* }
*/
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
try {
const { id } = await params
const body = await request.json()
const { tenantId, documentText } = body
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
// Get the document
const document = await evidenceStore.getById(tenantId, id)
if (!document) {
return NextResponse.json(
{ success: false, error: `Document not found: ${id}` },
{ status: 404 }
)
}
// Check if already analyzed
if (document.aiAnalysis && document.status === 'ANALYZED') {
return NextResponse.json({
success: true,
data: document.aiAnalysis,
meta: {
alreadyAnalyzed: true,
analyzedAt: document.aiAnalysis.analyzedAt,
},
})
}
// Get document text (in production, this would be extracted from the file)
const text = documentText || `[Document content from ${document.originalName}]`
// Initialize analyzer
const analyzer = new TOMDocumentAnalyzer()
// Analyze the document
const analysisResult = await analyzer.analyzeDocument(
document,
text,
'de'
)
// Check if analysis was successful
if (!analysisResult.success || !analysisResult.analysis) {
return NextResponse.json(
{ success: false, error: analysisResult.error || 'Analysis failed' },
{ status: 500 }
)
}
const analysis = analysisResult.analysis
// Update the document with analysis results
const updatedDocument = await evidenceStore.update(tenantId, id, {
aiAnalysis: analysis,
status: 'ANALYZED',
linkedControlIds: [
...new Set([
...document.linkedControlIds,
...analysis.applicableControls,
]),
],
})
return NextResponse.json({
success: true,
data: {
analysis,
document: updatedDocument,
},
meta: {
documentId: id,
analyzedAt: analysis.analyzedAt,
confidence: analysis.confidence,
applicableControlsCount: analysis.applicableControls.length,
gapsCount: analysis.gaps.length,
},
})
} catch (error) {
console.error('Failed to analyze evidence:', error)
return NextResponse.json(
{ success: false, error: 'Failed to analyze evidence' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'POST, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,153 @@
import { NextRequest, NextResponse } from 'next/server'
import { DocumentType } from '@/lib/sdk/tom-generator/types'
import { evidenceStore } from '@/lib/sdk/tom-generator/evidence-store'
/**
* TOM Generator Evidence API
*
* GET /api/sdk/v1/tom-generator/evidence?tenantId=xxx - List all evidence documents
* DELETE /api/sdk/v1/tom-generator/evidence?tenantId=xxx&id=xxx - Delete evidence
*/
// =============================================================================
// HANDLERS
// =============================================================================
export async function GET(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const tenantId = searchParams.get('tenantId')
const documentType = searchParams.get('type') as DocumentType | null
const status = searchParams.get('status')
const id = searchParams.get('id')
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
// Get single document
if (id) {
const document = await evidenceStore.getById(tenantId, id)
if (!document) {
return NextResponse.json(
{ success: false, error: `Document not found: ${id}` },
{ status: 404 }
)
}
return NextResponse.json({
success: true,
data: document,
})
}
// Filter by type
if (documentType) {
const documents = await evidenceStore.getByType(tenantId, documentType)
return NextResponse.json({
success: true,
data: documents,
meta: {
count: documents.length,
filter: { type: documentType },
},
})
}
// Filter by status
if (status) {
const documents = await evidenceStore.getByStatus(tenantId, status)
return NextResponse.json({
success: true,
data: documents,
meta: {
count: documents.length,
filter: { status },
},
})
}
// Get all documents
const documents = await evidenceStore.getAll(tenantId)
// Group by type for summary
const byType: Record<string, number> = {}
const byStatus: Record<string, number> = {}
documents.forEach((doc) => {
byType[doc.documentType] = (byType[doc.documentType] || 0) + 1
byStatus[doc.status] = (byStatus[doc.status] || 0) + 1
})
return NextResponse.json({
success: true,
data: documents,
meta: {
count: documents.length,
byType,
byStatus,
},
})
} catch (error) {
console.error('Failed to fetch evidence:', error)
return NextResponse.json(
{ success: false, error: 'Failed to fetch evidence' },
{ status: 500 }
)
}
}
export async function DELETE(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const tenantId = searchParams.get('tenantId')
const id = searchParams.get('id')
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
if (!id) {
return NextResponse.json(
{ success: false, error: 'id is required' },
{ status: 400 }
)
}
const deleted = await evidenceStore.delete(tenantId, id)
if (!deleted) {
return NextResponse.json(
{ success: false, error: `Document not found: ${id}` },
{ status: 404 }
)
}
return NextResponse.json({
success: true,
id,
deletedAt: new Date().toISOString(),
})
} catch (error) {
console.error('Failed to delete evidence:', error)
return NextResponse.json(
{ success: false, error: 'Failed to delete evidence' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'GET, DELETE, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,155 @@
import { NextRequest, NextResponse } from 'next/server'
import { EvidenceDocument, DocumentType } from '@/lib/sdk/tom-generator/types'
import { evidenceStore } from '@/lib/sdk/tom-generator/evidence-store'
import crypto from 'crypto'
/**
* TOM Generator Evidence Upload API
*
* POST /api/sdk/v1/tom-generator/evidence/upload - Upload evidence document
*
* Request: multipart/form-data
* - file: File
* - tenantId: string
* - documentType: DocumentType
* - validFrom?: string (ISO date)
* - validUntil?: string (ISO date)
* - linkedControlIds?: string (comma-separated)
*/
// Document type detection based on filename patterns
function detectDocumentType(filename: string, mimeType: string): DocumentType {
const lower = filename.toLowerCase()
if (lower.includes('avv') || lower.includes('auftragsverarbeitung')) {
return 'AVV'
}
if (lower.includes('dpa') || lower.includes('data processing')) {
return 'DPA'
}
if (lower.includes('sla') || lower.includes('service level')) {
return 'SLA'
}
if (lower.includes('nda') || lower.includes('vertraulichkeit') || lower.includes('geheimhaltung')) {
return 'NDA'
}
if (lower.includes('policy') || lower.includes('richtlinie')) {
return 'POLICY'
}
if (lower.includes('cert') || lower.includes('zertifikat') || lower.includes('iso')) {
return 'CERTIFICATE'
}
if (lower.includes('audit') || lower.includes('prüf') || lower.includes('bericht')) {
return 'AUDIT_REPORT'
}
return 'OTHER'
}
export async function POST(request: NextRequest) {
try {
const formData = await request.formData()
const file = formData.get('file') as File | null
const tenantId = formData.get('tenantId') as string | null
const documentType = formData.get('documentType') as DocumentType | null
const validFrom = formData.get('validFrom') as string | null
const validUntil = formData.get('validUntil') as string | null
const linkedControlIdsStr = formData.get('linkedControlIds') as string | null
const uploadedBy = formData.get('uploadedBy') as string | null
if (!file) {
return NextResponse.json(
{ success: false, error: 'file is required' },
{ status: 400 }
)
}
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
// Read file data
const arrayBuffer = await file.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
// Generate hash for deduplication
const hash = crypto.createHash('sha256').update(buffer).digest('hex')
// Generate unique filename
const id = crypto.randomUUID()
const ext = file.name.split('.').pop() || 'bin'
const filename = `${id}.${ext}`
// Detect document type if not provided
const detectedType = detectDocumentType(file.name, file.type)
const finalDocumentType = documentType || detectedType
// Parse linked control IDs
const linkedControlIds = linkedControlIdsStr
? linkedControlIdsStr.split(',').map((s) => s.trim()).filter(Boolean)
: []
// Create evidence document
const document: EvidenceDocument = {
id,
filename,
originalName: file.name,
mimeType: file.type,
size: file.size,
uploadedAt: new Date(),
uploadedBy: uploadedBy || 'unknown',
documentType: finalDocumentType,
detectedType,
hash,
validFrom: validFrom ? new Date(validFrom) : null,
validUntil: validUntil ? new Date(validUntil) : null,
linkedControlIds,
aiAnalysis: null,
status: 'PENDING',
}
// Store the document metadata
// Note: In production, the actual file would be stored in MinIO/S3
await evidenceStore.add(tenantId, document)
return NextResponse.json({
success: true,
data: {
id: document.id,
filename: document.filename,
originalName: document.originalName,
mimeType: document.mimeType,
size: document.size,
documentType: document.documentType,
detectedType: document.detectedType,
status: document.status,
uploadedAt: document.uploadedAt.toISOString(),
},
meta: {
hash,
needsAnalysis: true,
analyzeUrl: `/api/sdk/v1/tom-generator/evidence/${id}/analyze`,
},
})
} catch (error) {
console.error('Failed to upload evidence:', error)
return NextResponse.json(
{ success: false, error: 'Failed to upload evidence' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'POST, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,245 @@
import { NextRequest, NextResponse } from 'next/server'
import { TOMGeneratorState } from '@/lib/sdk/tom-generator/types'
import { generateDOCXContent, generateDOCXFilename } from '@/lib/sdk/tom-generator/export/docx'
import { generatePDFContent, generatePDFFilename } from '@/lib/sdk/tom-generator/export/pdf'
import { generateZIPFiles, generateZIPFilename } from '@/lib/sdk/tom-generator/export/zip'
import crypto from 'crypto'
/**
* TOM Generator Export API
*
* POST /api/sdk/v1/tom-generator/export - Generate export
*
* Request body:
* {
* tenantId: string
* format: 'DOCX' | 'PDF' | 'JSON' | 'ZIP'
* language: 'de' | 'en'
* state: TOMGeneratorState
* options?: {
* includeEvidence?: boolean
* includeGapAnalysis?: boolean
* companyLogo?: string (base64)
* }
* }
*/
// In-memory export store for tracking exports
interface StoredExport {
id: string
tenantId: string
format: string
filename: string
content: string // Base64 encoded content
generatedAt: Date
size: number
}
const exportStore: Map<string, StoredExport> = new Map()
export async function POST(request: NextRequest) {
try {
const body = await request.json()
const { tenantId, format, language = 'de', state, options = {} } = body
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
if (!format) {
return NextResponse.json(
{ success: false, error: 'format is required (DOCX, PDF, JSON, ZIP)' },
{ status: 400 }
)
}
if (!state) {
return NextResponse.json(
{ success: false, error: 'state is required' },
{ status: 400 }
)
}
// Parse dates in state
const parsedState: TOMGeneratorState = {
...state,
createdAt: new Date(state.createdAt),
updatedAt: new Date(state.updatedAt),
steps: state.steps?.map((step: { id: string; completed: boolean; data: unknown; validatedAt: string | null }) => ({
...step,
validatedAt: step.validatedAt ? new Date(step.validatedAt) : null,
})) || [],
documents: state.documents?.map((doc: { uploadedAt: string; validFrom?: string; validUntil?: string; aiAnalysis?: { analyzedAt: string } }) => ({
...doc,
uploadedAt: new Date(doc.uploadedAt),
validFrom: doc.validFrom ? new Date(doc.validFrom) : null,
validUntil: doc.validUntil ? new Date(doc.validUntil) : null,
aiAnalysis: doc.aiAnalysis ? {
...doc.aiAnalysis,
analyzedAt: new Date(doc.aiAnalysis.analyzedAt),
} : null,
})) || [],
derivedTOMs: state.derivedTOMs?.map((tom: { implementationDate?: string; reviewDate?: string }) => ({
...tom,
implementationDate: tom.implementationDate ? new Date(tom.implementationDate) : null,
reviewDate: tom.reviewDate ? new Date(tom.reviewDate) : null,
})) || [],
gapAnalysis: state.gapAnalysis ? {
...state.gapAnalysis,
generatedAt: new Date(state.gapAnalysis.generatedAt),
} : null,
exports: state.exports?.map((exp: { generatedAt: string }) => ({
...exp,
generatedAt: new Date(exp.generatedAt),
})) || [],
}
const exportId = crypto.randomUUID()
let content: string
let filename: string
let mimeType: string
switch (format.toUpperCase()) {
case 'DOCX': {
// Generate DOCX structure (actual binary conversion would require docx library)
const docxContent = generateDOCXContent(parsedState, { language: language as 'de' | 'en', ...options })
content = Buffer.from(JSON.stringify(docxContent, null, 2)).toString('base64')
filename = generateDOCXFilename(parsedState, language as 'de' | 'en')
mimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
break
}
case 'PDF': {
// Generate PDF structure (actual binary conversion would require pdf library)
const pdfContent = generatePDFContent(parsedState, { language: language as 'de' | 'en', ...options })
content = Buffer.from(JSON.stringify(pdfContent, null, 2)).toString('base64')
filename = generatePDFFilename(parsedState, language as 'de' | 'en')
mimeType = 'application/pdf'
break
}
case 'JSON':
content = Buffer.from(JSON.stringify(parsedState, null, 2)).toString('base64')
filename = `tom-export-${tenantId}-${new Date().toISOString().split('T')[0]}.json`
mimeType = 'application/json'
break
case 'ZIP': {
const files = generateZIPFiles(parsedState, { language: language as 'de' | 'en', ...options })
// For now, return the files metadata (actual ZIP generation would require a library)
content = Buffer.from(JSON.stringify(files, null, 2)).toString('base64')
filename = generateZIPFilename(parsedState, language as 'de' | 'en')
mimeType = 'application/zip'
break
}
default:
return NextResponse.json(
{ success: false, error: `Unsupported format: ${format}` },
{ status: 400 }
)
}
// Store the export
const storedExport: StoredExport = {
id: exportId,
tenantId,
format: format.toUpperCase(),
filename,
content,
generatedAt: new Date(),
size: Buffer.from(content, 'base64').length,
}
exportStore.set(exportId, storedExport)
return NextResponse.json({
success: true,
data: {
exportId,
filename,
format: format.toUpperCase(),
mimeType,
size: storedExport.size,
generatedAt: storedExport.generatedAt.toISOString(),
downloadUrl: `/api/sdk/v1/tom-generator/export?exportId=${exportId}`,
},
})
} catch (error) {
console.error('Failed to generate export:', error)
return NextResponse.json(
{ success: false, error: 'Failed to generate export' },
{ status: 500 }
)
}
}
export async function GET(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const exportId = searchParams.get('exportId')
if (!exportId) {
return NextResponse.json(
{ success: false, error: 'exportId is required' },
{ status: 400 }
)
}
const storedExport = exportStore.get(exportId)
if (!storedExport) {
return NextResponse.json(
{ success: false, error: `Export not found: ${exportId}` },
{ status: 404 }
)
}
// Return the file as download
const buffer = Buffer.from(storedExport.content, 'base64')
let mimeType: string
switch (storedExport.format) {
case 'DOCX':
mimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
break
case 'PDF':
mimeType = 'application/pdf'
break
case 'JSON':
mimeType = 'application/json'
break
case 'ZIP':
mimeType = 'application/zip'
break
default:
mimeType = 'application/octet-stream'
}
return new NextResponse(buffer, {
headers: {
'Content-Type': mimeType,
'Content-Disposition': `attachment; filename="${storedExport.filename}"`,
'Content-Length': buffer.length.toString(),
},
})
} catch (error) {
console.error('Failed to download export:', error)
return NextResponse.json(
{ success: false, error: 'Failed to download export' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'GET, POST, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,205 @@
import { NextRequest, NextResponse } from 'next/server'
import { TOMRulesEngine } from '@/lib/sdk/tom-generator/rules-engine'
import { TOMGeneratorState, GapAnalysisResult } from '@/lib/sdk/tom-generator/types'
/**
* TOM Generator Gap Analysis API
*
* POST /api/sdk/v1/tom-generator/gap-analysis - Perform gap analysis
*
* Request body:
* {
* tenantId: string
* state: TOMGeneratorState
* }
*
* Response:
* {
* gapAnalysis: GapAnalysisResult
* }
*/
export async function POST(request: NextRequest) {
try {
const body = await request.json()
const { tenantId, state } = body
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
if (!state) {
return NextResponse.json(
{ success: false, error: 'state is required in request body' },
{ status: 400 }
)
}
// Parse dates in state
const parsedState: TOMGeneratorState = {
...state,
createdAt: new Date(state.createdAt),
updatedAt: new Date(state.updatedAt),
steps: state.steps?.map((step: { id: string; completed: boolean; data: unknown; validatedAt: string | null }) => ({
...step,
validatedAt: step.validatedAt ? new Date(step.validatedAt) : null,
})) || [],
documents: state.documents?.map((doc: { uploadedAt: string; validFrom?: string; validUntil?: string; aiAnalysis?: { analyzedAt: string } }) => ({
...doc,
uploadedAt: new Date(doc.uploadedAt),
validFrom: doc.validFrom ? new Date(doc.validFrom) : null,
validUntil: doc.validUntil ? new Date(doc.validUntil) : null,
aiAnalysis: doc.aiAnalysis ? {
...doc.aiAnalysis,
analyzedAt: new Date(doc.aiAnalysis.analyzedAt),
} : null,
})) || [],
derivedTOMs: state.derivedTOMs?.map((tom: { implementationDate?: string; reviewDate?: string }) => ({
...tom,
implementationDate: tom.implementationDate ? new Date(tom.implementationDate) : null,
reviewDate: tom.reviewDate ? new Date(tom.reviewDate) : null,
})) || [],
gapAnalysis: state.gapAnalysis ? {
...state.gapAnalysis,
generatedAt: new Date(state.gapAnalysis.generatedAt),
} : null,
exports: state.exports?.map((exp: { generatedAt: string }) => ({
...exp,
generatedAt: new Date(exp.generatedAt),
})) || [],
}
// Initialize rules engine
const engine = new TOMRulesEngine()
// Perform gap analysis using derived TOMs and documents from state
const gapAnalysis = engine.performGapAnalysis(
parsedState.derivedTOMs,
parsedState.documents
)
// Calculate detailed metrics
const metrics = calculateGapMetrics(gapAnalysis)
return NextResponse.json({
success: true,
data: {
gapAnalysis,
metrics,
generatedAt: gapAnalysis.generatedAt.toISOString(),
},
})
} catch (error) {
console.error('Failed to perform gap analysis:', error)
return NextResponse.json(
{ success: false, error: 'Failed to perform gap analysis' },
{ status: 500 }
)
}
}
function calculateGapMetrics(gapAnalysis: GapAnalysisResult) {
const totalGaps = gapAnalysis.missingControls.length +
gapAnalysis.partialControls.length +
gapAnalysis.missingEvidence.length
const criticalGaps = gapAnalysis.missingControls.filter(
(c) => c.priority === 'CRITICAL' || c.priority === 'HIGH'
).length
const mediumGaps = gapAnalysis.missingControls.filter(
(c) => c.priority === 'MEDIUM'
).length
const lowGaps = gapAnalysis.missingControls.filter(
(c) => c.priority === 'LOW'
).length
// Group missing controls by category
const gapsByCategory: Record<string, number> = {}
gapAnalysis.missingControls.forEach((control) => {
const category = control.controlId.split('-')[1] || 'OTHER'
gapsByCategory[category] = (gapsByCategory[category] || 0) + 1
})
// Calculate compliance readiness
const maxScore = 100
const deductionPerCritical = 10
const deductionPerMedium = 5
const deductionPerLow = 2
const deductionPerPartial = 3
const deductionPerMissingEvidence = 1
const deductions =
criticalGaps * deductionPerCritical +
mediumGaps * deductionPerMedium +
lowGaps * deductionPerLow +
gapAnalysis.partialControls.length * deductionPerPartial +
gapAnalysis.missingEvidence.length * deductionPerMissingEvidence
const complianceReadiness = Math.max(0, Math.min(100, maxScore - deductions))
// Prioritized action items
const prioritizedActions = [
...gapAnalysis.missingControls
.filter((c) => c.priority === 'CRITICAL')
.map((c) => ({
type: 'MISSING_CONTROL',
priority: 'CRITICAL',
controlId: c.controlId,
reason: c.reason,
action: `Implement control ${c.controlId}`,
})),
...gapAnalysis.missingControls
.filter((c) => c.priority === 'HIGH')
.map((c) => ({
type: 'MISSING_CONTROL',
priority: 'HIGH',
controlId: c.controlId,
reason: c.reason,
action: `Implement control ${c.controlId}`,
})),
...gapAnalysis.partialControls.map((c) => ({
type: 'PARTIAL_CONTROL',
priority: 'MEDIUM',
controlId: c.controlId,
missingAspects: c.missingAspects,
action: `Complete implementation of ${c.controlId}`,
})),
...gapAnalysis.missingEvidence.map((e) => ({
type: 'MISSING_EVIDENCE',
priority: 'LOW',
controlId: e.controlId,
requiredEvidence: e.requiredEvidence,
action: `Upload evidence for ${e.controlId}`,
})),
]
return {
totalGaps,
criticalGaps,
mediumGaps,
lowGaps,
partialControls: gapAnalysis.partialControls.length,
missingEvidence: gapAnalysis.missingEvidence.length,
gapsByCategory,
complianceReadiness,
overallScore: gapAnalysis.overallScore,
prioritizedActionsCount: prioritizedActions.length,
prioritizedActions: prioritizedActions.slice(0, 10), // Top 10 actions
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'POST, OPTIONS',
},
}
)
}

View File

@@ -0,0 +1,250 @@
import { NextRequest, NextResponse } from 'next/server'
import {
TOMGeneratorState,
createEmptyTOMGeneratorState,
} from '@/lib/sdk/tom-generator/types'
/**
* TOM Generator State API
*
* GET /api/sdk/v1/tom-generator/state?tenantId=xxx - Load TOM generator state
* POST /api/sdk/v1/tom-generator/state - Save TOM generator state
* DELETE /api/sdk/v1/tom-generator/state?tenantId=xxx - Clear state
*/
// =============================================================================
// STORAGE (In-Memory for development)
// =============================================================================
interface StoredTOMState {
state: TOMGeneratorState
version: number
createdAt: string
updatedAt: string
}
class InMemoryTOMStateStore {
private store: Map<string, StoredTOMState> = new Map()
async get(tenantId: string): Promise<StoredTOMState | null> {
return this.store.get(tenantId) || null
}
async save(tenantId: string, state: TOMGeneratorState, expectedVersion?: number): Promise<StoredTOMState> {
const existing = this.store.get(tenantId)
if (expectedVersion !== undefined && existing && existing.version !== expectedVersion) {
const error = new Error('Version conflict') as Error & { status: number }
error.status = 409
throw error
}
const now = new Date().toISOString()
const newVersion = existing ? existing.version + 1 : 1
const stored: StoredTOMState = {
state: {
...state,
updatedAt: new Date(now),
},
version: newVersion,
createdAt: existing?.createdAt || now,
updatedAt: now,
}
this.store.set(tenantId, stored)
return stored
}
async delete(tenantId: string): Promise<boolean> {
return this.store.delete(tenantId)
}
async list(): Promise<{ tenantId: string; updatedAt: string }[]> {
const result: { tenantId: string; updatedAt: string }[] = []
this.store.forEach((value, key) => {
result.push({ tenantId: key, updatedAt: value.updatedAt })
})
return result
}
}
const stateStore = new InMemoryTOMStateStore()
// =============================================================================
// HANDLERS
// =============================================================================
export async function GET(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const tenantId = searchParams.get('tenantId')
// List all states if no tenantId provided
if (!tenantId) {
const states = await stateStore.list()
return NextResponse.json({
success: true,
data: states,
})
}
const stored = await stateStore.get(tenantId)
if (!stored) {
// Return empty state for new tenants
const emptyState = createEmptyTOMGeneratorState(tenantId)
return NextResponse.json({
success: true,
data: {
tenantId,
state: emptyState,
version: 0,
isNew: true,
},
})
}
return NextResponse.json({
success: true,
data: {
tenantId,
state: stored.state,
version: stored.version,
lastModified: stored.updatedAt,
},
})
} catch (error) {
console.error('Failed to load TOM generator state:', error)
return NextResponse.json(
{ success: false, error: 'Failed to load state' },
{ status: 500 }
)
}
}
export async function POST(request: NextRequest) {
try {
const body = await request.json()
const { tenantId, state, version } = body
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
if (!state) {
return NextResponse.json(
{ success: false, error: 'state is required' },
{ status: 400 }
)
}
// Deserialize dates
const parsedState: TOMGeneratorState = {
...state,
createdAt: new Date(state.createdAt),
updatedAt: new Date(state.updatedAt),
steps: state.steps.map((step: { id: string; completed: boolean; data: unknown; validatedAt: string | null }) => ({
...step,
validatedAt: step.validatedAt ? new Date(step.validatedAt) : null,
})),
documents: state.documents?.map((doc: { uploadedAt: string; validFrom?: string; validUntil?: string; aiAnalysis?: { analyzedAt: string } }) => ({
...doc,
uploadedAt: new Date(doc.uploadedAt),
validFrom: doc.validFrom ? new Date(doc.validFrom) : null,
validUntil: doc.validUntil ? new Date(doc.validUntil) : null,
aiAnalysis: doc.aiAnalysis ? {
...doc.aiAnalysis,
analyzedAt: new Date(doc.aiAnalysis.analyzedAt),
} : null,
})) || [],
derivedTOMs: state.derivedTOMs?.map((tom: { implementationDate?: string; reviewDate?: string }) => ({
...tom,
implementationDate: tom.implementationDate ? new Date(tom.implementationDate) : null,
reviewDate: tom.reviewDate ? new Date(tom.reviewDate) : null,
})) || [],
gapAnalysis: state.gapAnalysis ? {
...state.gapAnalysis,
generatedAt: new Date(state.gapAnalysis.generatedAt),
} : null,
exports: state.exports?.map((exp: { generatedAt: string }) => ({
...exp,
generatedAt: new Date(exp.generatedAt),
})) || [],
}
const stored = await stateStore.save(tenantId, parsedState, version)
return NextResponse.json({
success: true,
data: {
tenantId,
state: stored.state,
version: stored.version,
lastModified: stored.updatedAt,
},
})
} catch (error) {
const err = error as Error & { status?: number }
if (err.status === 409 || err.message === 'Version conflict') {
return NextResponse.json(
{
success: false,
error: 'Version conflict. State was modified by another request.',
code: 'VERSION_CONFLICT',
},
{ status: 409 }
)
}
console.error('Failed to save TOM generator state:', error)
return NextResponse.json(
{ success: false, error: 'Failed to save state' },
{ status: 500 }
)
}
}
export async function DELETE(request: NextRequest) {
try {
const { searchParams } = new URL(request.url)
const tenantId = searchParams.get('tenantId')
if (!tenantId) {
return NextResponse.json(
{ success: false, error: 'tenantId is required' },
{ status: 400 }
)
}
const deleted = await stateStore.delete(tenantId)
return NextResponse.json({
success: true,
tenantId,
deleted,
deletedAt: new Date().toISOString(),
})
} catch (error) {
console.error('Failed to delete TOM generator state:', error)
return NextResponse.json(
{ success: false, error: 'Failed to delete state' },
{ status: 500 }
)
}
}
export async function OPTIONS() {
return NextResponse.json(
{ status: 'ok' },
{
headers: {
Allow: 'GET, POST, DELETE, OPTIONS',
},
}
)
}