refactor(admin): split evidence, import, portfolio pages

Extract components and hooks from oversized pages into colocated
_components/ and _hooks/ subdirectories to enforce the 500-LOC hard cap.
page.tsx files reduced to 205, 121, and 136 LOC respectively.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Sharang Parnerkar
2026-04-16 13:07:04 +02:00
parent 9096aad693
commit 7907b3f25b
42 changed files with 3568 additions and 3591 deletions

View File

@@ -0,0 +1,184 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useSDK } from '@/lib/sdk'
import type { GapAnalysis, GapItem, ImportedDocument, ImportedDocumentType } from '@/lib/sdk/types'
import type { UploadedFile } from '../_components/FileItem'
export function useImport() {
const { state, addImportedDocument, setGapAnalysis, dispatch } = useSDK()
const [files, setFiles] = useState<UploadedFile[]>([])
const [isAnalyzing, setIsAnalyzing] = useState(false)
const [analysisResult, setAnalysisResult] = useState<GapAnalysis | null>(null)
const [importHistory, setImportHistory] = useState<any[]>([])
const [historyLoading, setHistoryLoading] = useState(false)
const [objectUrls, setObjectUrls] = useState<string[]>([])
useEffect(() => {
const loadHistory = async () => {
setHistoryLoading(true)
try {
const response = await fetch('/api/sdk/v1/import?tenant_id=default')
if (response.ok) {
const data = await response.json()
setImportHistory(Array.isArray(data) ? data : data.items || [])
}
} catch (err) {
console.error('Failed to load import history:', err)
} finally {
setHistoryLoading(false)
}
}
loadHistory()
}, [analysisResult])
useEffect(() => {
return () => {
objectUrls.forEach(url => URL.revokeObjectURL(url))
}
}, [objectUrls])
const createTrackedObjectURL = useCallback((file: File) => {
const url = URL.createObjectURL(file)
setObjectUrls(prev => [...prev, url])
return url
}, [])
const handleFilesAdded = useCallback((newFiles: File[]) => {
const uploadedFiles: UploadedFile[] = newFiles.map(file => ({
id: `file-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
file,
type: 'OTHER' as ImportedDocumentType,
status: 'pending' as const,
progress: 0,
}))
setFiles(prev => [...prev, ...uploadedFiles])
}, [])
const handleTypeChange = useCallback((id: string, type: ImportedDocumentType) => {
setFiles(prev => prev.map(f => (f.id === id ? { ...f, type } : f)))
}, [])
const handleRemove = useCallback((id: string) => {
setFiles(prev => prev.filter(f => f.id !== id))
}, [])
const handleDeleteHistory = async (id: string) => {
try {
const res = await fetch(`/api/sdk/v1/import/${id}`, { method: 'DELETE' })
if (res.ok) {
setImportHistory(prev => prev.filter(h => h.id !== id))
}
} catch (err) {
console.error('Failed to delete import:', err)
}
}
const handleAnalyze = async () => {
if (files.length === 0) return
setIsAnalyzing(true)
const allGaps: GapItem[] = []
for (let i = 0; i < files.length; i++) {
const file = files[i]
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, status: 'uploading' as const } : f)))
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, progress: 30 } : f)))
const formData = new FormData()
formData.append('file', file.file)
formData.append('document_type', file.type)
formData.append('tenant_id', 'default')
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, progress: 60, status: 'analyzing' as const } : f)))
try {
const response = await fetch('/api/sdk/v1/import/analyze', { method: 'POST', body: formData })
if (response.ok) {
const result = await response.json()
const doc: ImportedDocument = {
id: result.document_id || file.id,
name: file.file.name,
type: result.detected_type || file.type,
fileUrl: createTrackedObjectURL(file.file),
uploadedAt: new Date(),
analyzedAt: new Date(),
analysisResult: {
detectedType: result.detected_type || file.type,
confidence: result.confidence || 0.85,
extractedEntities: result.extracted_entities || [],
gaps: result.gap_analysis?.gaps || [],
recommendations: result.recommendations || [],
},
}
addImportedDocument(doc)
if (result.gap_analysis?.gaps) {
for (const gap of result.gap_analysis.gaps) {
allGaps.push({
id: gap.id,
category: gap.category,
description: gap.description,
severity: gap.severity,
regulation: gap.regulation,
requiredAction: gap.required_action,
relatedStepId: gap.related_step_id || '',
})
}
}
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, progress: 100, status: 'complete' as const } : f)))
} else {
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, status: 'error' as const, error: 'Analyse fehlgeschlagen' } : f)))
}
} catch {
const doc: ImportedDocument = {
id: file.id,
name: file.file.name,
type: file.type,
fileUrl: createTrackedObjectURL(file.file),
uploadedAt: new Date(),
analyzedAt: new Date(),
analysisResult: {
detectedType: file.type,
confidence: 0.5,
extractedEntities: [],
gaps: [],
recommendations: ['Offline-Modus — Backend nicht erreichbar, manuelle Pruefung empfohlen'],
},
}
addImportedDocument(doc)
setFiles(prev => prev.map(f => (f.id === file.id ? { ...f, progress: 100, status: 'complete' as const, error: 'offline' } : f)))
}
}
const gapAnalysis: GapAnalysis = {
id: `analysis-${Date.now()}`,
createdAt: new Date(),
totalGaps: allGaps.length,
criticalGaps: allGaps.filter(g => g.severity === 'CRITICAL').length,
highGaps: allGaps.filter(g => g.severity === 'HIGH').length,
mediumGaps: allGaps.filter(g => g.severity === 'MEDIUM').length,
lowGaps: allGaps.filter(g => g.severity === 'LOW').length,
gaps: allGaps,
recommendedPackages: allGaps.length > 0 ? ['analyse', 'dokumentation'] : [],
}
setAnalysisResult(gapAnalysis)
setGapAnalysis(gapAnalysis)
setIsAnalyzing(false)
dispatch({ type: 'COMPLETE_STEP', payload: 'import' })
}
return {
state,
files,
setFiles,
isAnalyzing,
analysisResult,
importHistory,
historyLoading,
handleFilesAdded,
handleTypeChange,
handleRemove,
handleAnalyze,
handleDeleteHistory,
}
}