Fix: Remove broken getKlausurApiUrl and clean up empty lines
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 42s
CI / test-go-edu-search (push) Successful in 34s
CI / test-python-klausur (push) Failing after 2m51s
CI / test-python-agent-core (push) Successful in 21s
CI / test-nodejs-website (push) Successful in 29s
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 42s
CI / test-go-edu-search (push) Successful in 34s
CI / test-python-klausur (push) Failing after 2m51s
CI / test-python-agent-core (push) Successful in 21s
CI / test-nodejs-website (push) Successful in 29s
A sed replacement left orphaned hostname references in the story page and empty lines in the getApiBase functions. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
441
admin-lehrer/app/(admin)/ai/rag/_hooks/useRAGPage.ts
Normal file
441
admin-lehrer/app/(admin)/ai/rag/_hooks/useRAGPage.ts
Normal file
@@ -0,0 +1,441 @@
|
||||
'use client'
|
||||
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
import { API_PROXY, DSFA_API_PROXY } from '../rag-data'
|
||||
import type {
|
||||
TabId,
|
||||
RegulationCategory,
|
||||
CollectionStatus,
|
||||
SearchResult,
|
||||
DsfaSource,
|
||||
DsfaCorpusStatus,
|
||||
CustomDocument,
|
||||
PipelineState,
|
||||
PipelineCheckpoint,
|
||||
} from '../types'
|
||||
|
||||
export function useRAGPage() {
|
||||
const [activeTab, setActiveTab] = useState<TabId>('overview')
|
||||
const [collectionStatus, setCollectionStatus] = useState<CollectionStatus | null>(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [searchResults, setSearchResults] = useState<SearchResult[]>([])
|
||||
const [searching, setSearching] = useState(false)
|
||||
const [selectedRegulations, setSelectedRegulations] = useState<string[]>([])
|
||||
const [ingestionRunning, setIngestionRunning] = useState(false)
|
||||
const [ingestionLog, setIngestionLog] = useState<string[]>([])
|
||||
const [pipelineState, setPipelineState] = useState<PipelineState | null>(null)
|
||||
const [pipelineLoading, setPipelineLoading] = useState(false)
|
||||
const [pipelineStarting, setPipelineStarting] = useState(false)
|
||||
const [expandedRegulation, setExpandedRegulation] = useState<string | null>(null)
|
||||
const [autoRefresh, setAutoRefresh] = useState(true)
|
||||
const [elapsedTime, setElapsedTime] = useState<string>('')
|
||||
const [expandedDocTypes, setExpandedDocTypes] = useState<string[]>(['eu_regulation', 'eu_directive'])
|
||||
const [expandedMatrixDoc, setExpandedMatrixDoc] = useState<string | null>(null)
|
||||
|
||||
// DSFA corpus state
|
||||
const [dsfaSources, setDsfaSources] = useState<DsfaSource[]>([])
|
||||
const [dsfaStatus, setDsfaStatus] = useState<DsfaCorpusStatus | null>(null)
|
||||
const [dsfaLoading, setDsfaLoading] = useState(false)
|
||||
const [regulationCategory, setRegulationCategory] = useState<RegulationCategory>('regulations')
|
||||
const [expandedDsfaSource, setExpandedDsfaSource] = useState<string | null>(null)
|
||||
|
||||
// Data tab state
|
||||
const [customDocuments, setCustomDocuments] = useState<CustomDocument[]>([])
|
||||
const [uploadFile, setUploadFile] = useState<File | null>(null)
|
||||
const [uploadTitle, setUploadTitle] = useState('')
|
||||
const [uploadCode, setUploadCode] = useState('')
|
||||
const [uploading, setUploading] = useState(false)
|
||||
const [linkUrl, setLinkUrl] = useState('')
|
||||
const [linkTitle, setLinkTitle] = useState('')
|
||||
const [linkCode, setLinkCode] = useState('')
|
||||
const [addingLink, setAddingLink] = useState(false)
|
||||
|
||||
const fetchStatus = useCallback(async () => {
|
||||
setLoading(true)
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=status`)
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
setCollectionStatus(data)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch status:', error)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const fetchPipeline = useCallback(async () => {
|
||||
setPipelineLoading(true)
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=pipeline-checkpoints`)
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
setPipelineState(data)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch pipeline:', error)
|
||||
} finally {
|
||||
setPipelineLoading(false)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const fetchDsfaStatus = useCallback(async () => {
|
||||
setDsfaLoading(true)
|
||||
try {
|
||||
const [statusRes, sourcesRes] = await Promise.all([
|
||||
fetch(`${DSFA_API_PROXY}?action=status`),
|
||||
fetch(`${DSFA_API_PROXY}?action=sources`),
|
||||
])
|
||||
if (statusRes.ok) {
|
||||
const data = await statusRes.json()
|
||||
setDsfaStatus(data)
|
||||
}
|
||||
if (sourcesRes.ok) {
|
||||
const data = await sourcesRes.json()
|
||||
setDsfaSources(data.sources || data || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch DSFA status:', error)
|
||||
} finally {
|
||||
setDsfaLoading(false)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const fetchCustomDocuments = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=custom-documents`)
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
setCustomDocuments(data.documents || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch custom documents:', error)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleUpload = async () => {
|
||||
if (!uploadFile || !uploadTitle || !uploadCode) return
|
||||
|
||||
setUploading(true)
|
||||
try {
|
||||
const formData = new FormData()
|
||||
formData.append('file', uploadFile)
|
||||
formData.append('title', uploadTitle)
|
||||
formData.append('code', uploadCode)
|
||||
formData.append('document_type', 'custom')
|
||||
|
||||
const res = await fetch(`${API_PROXY}?action=upload`, {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (res.ok) {
|
||||
setUploadFile(null)
|
||||
setUploadTitle('')
|
||||
setUploadCode('')
|
||||
fetchCustomDocuments()
|
||||
fetchStatus()
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Upload failed:', error)
|
||||
} finally {
|
||||
setUploading(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleAddLink = async () => {
|
||||
if (!linkUrl || !linkTitle || !linkCode) return
|
||||
|
||||
setAddingLink(true)
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=add-link`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
url: linkUrl,
|
||||
title: linkTitle,
|
||||
code: linkCode,
|
||||
document_type: 'custom',
|
||||
}),
|
||||
})
|
||||
|
||||
if (res.ok) {
|
||||
setLinkUrl('')
|
||||
setLinkTitle('')
|
||||
setLinkCode('')
|
||||
fetchCustomDocuments()
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Add link failed:', error)
|
||||
} finally {
|
||||
setAddingLink(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDeleteDocument = async (docId: string) => {
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=delete-document&docId=${docId}`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
if (res.ok) {
|
||||
fetchCustomDocuments()
|
||||
fetchStatus()
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Delete failed:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const handleStartPipeline = async (skipIngestion: boolean = false) => {
|
||||
setPipelineStarting(true)
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=start-pipeline`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
force_reindex: false,
|
||||
skip_ingestion: skipIngestion,
|
||||
}),
|
||||
})
|
||||
|
||||
if (res.ok) {
|
||||
setTimeout(() => {
|
||||
fetchPipeline()
|
||||
setPipelineStarting(false)
|
||||
}, 2000)
|
||||
} else {
|
||||
setPipelineStarting(false)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to start pipeline:', error)
|
||||
setPipelineStarting(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleSearch = async () => {
|
||||
if (!searchQuery.trim()) return
|
||||
|
||||
setSearching(true)
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
action: 'search',
|
||||
query: searchQuery,
|
||||
top_k: '5',
|
||||
})
|
||||
if (selectedRegulations.length > 0) {
|
||||
params.append('regulations', selectedRegulations.join(','))
|
||||
}
|
||||
|
||||
const res = await fetch(`${API_PROXY}?${params}`)
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
setSearchResults(data.results || [])
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Search failed:', error)
|
||||
} finally {
|
||||
setSearching(false)
|
||||
}
|
||||
}
|
||||
|
||||
const triggerIngestion = async () => {
|
||||
setIngestionRunning(true)
|
||||
setIngestionLog(['Starte Re-Ingestion aller 19 Regulierungen...'])
|
||||
|
||||
try {
|
||||
const res = await fetch(`${API_PROXY}?action=ingest`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ force: true }),
|
||||
})
|
||||
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
setIngestionLog((prev) => [...prev, 'Ingestion gestartet. Job-ID: ' + (data.job_id || 'N/A')])
|
||||
const checkStatus = setInterval(async () => {
|
||||
try {
|
||||
const statusRes = await fetch(`${API_PROXY}?action=ingestion-status`)
|
||||
if (statusRes.ok) {
|
||||
const statusData = await statusRes.json()
|
||||
if (statusData.completed) {
|
||||
clearInterval(checkStatus)
|
||||
setIngestionRunning(false)
|
||||
setIngestionLog((prev) => [...prev, 'Ingestion abgeschlossen!'])
|
||||
fetchStatus()
|
||||
} else if (statusData.current_regulation) {
|
||||
setIngestionLog((prev) => [
|
||||
...prev,
|
||||
`Verarbeite: ${statusData.current_regulation} (${statusData.processed}/${statusData.total})`,
|
||||
])
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore polling errors
|
||||
}
|
||||
}, 5000)
|
||||
} else {
|
||||
setIngestionLog((prev) => [...prev, 'Fehler: ' + res.statusText])
|
||||
setIngestionRunning(false)
|
||||
}
|
||||
} catch (error) {
|
||||
setIngestionLog((prev) => [...prev, 'Fehler: ' + String(error)])
|
||||
setIngestionRunning(false)
|
||||
}
|
||||
}
|
||||
|
||||
const getRegulationChunks = (code: string): number => {
|
||||
return collectionStatus?.regulations?.[code] || 0
|
||||
}
|
||||
|
||||
const getTotalChunks = (): number => {
|
||||
return collectionStatus?.totalPoints || 0
|
||||
}
|
||||
|
||||
// Initial data fetch
|
||||
useEffect(() => {
|
||||
fetchStatus()
|
||||
fetchDsfaStatus()
|
||||
}, [fetchStatus, fetchDsfaStatus])
|
||||
|
||||
// Fetch pipeline when tab changes
|
||||
useEffect(() => {
|
||||
if (activeTab === 'pipeline') {
|
||||
fetchPipeline()
|
||||
}
|
||||
}, [activeTab, fetchPipeline])
|
||||
|
||||
// Fetch custom documents when data tab is active
|
||||
useEffect(() => {
|
||||
if (activeTab === 'data') {
|
||||
fetchCustomDocuments()
|
||||
}
|
||||
}, [activeTab, fetchCustomDocuments])
|
||||
|
||||
// Auto-refresh pipeline status when running
|
||||
useEffect(() => {
|
||||
if (activeTab !== 'pipeline' || !autoRefresh) return
|
||||
|
||||
const isRunning = pipelineState?.status === 'running'
|
||||
|
||||
if (isRunning) {
|
||||
const interval = setInterval(() => {
|
||||
fetchPipeline()
|
||||
fetchStatus()
|
||||
}, 5000)
|
||||
|
||||
return () => clearInterval(interval)
|
||||
}
|
||||
}, [activeTab, autoRefresh, pipelineState?.status, fetchPipeline, fetchStatus])
|
||||
|
||||
// Update elapsed time
|
||||
useEffect(() => {
|
||||
if (!pipelineState?.started_at || pipelineState?.status !== 'running') {
|
||||
setElapsedTime('')
|
||||
return
|
||||
}
|
||||
|
||||
const updateElapsed = () => {
|
||||
const start = new Date(pipelineState.started_at!).getTime()
|
||||
const now = Date.now()
|
||||
const diff = Math.floor((now - start) / 1000)
|
||||
|
||||
const hours = Math.floor(diff / 3600)
|
||||
const minutes = Math.floor((diff % 3600) / 60)
|
||||
const seconds = diff % 60
|
||||
|
||||
if (hours > 0) {
|
||||
setElapsedTime(`${hours}h ${minutes}m ${seconds}s`)
|
||||
} else if (minutes > 0) {
|
||||
setElapsedTime(`${minutes}m ${seconds}s`)
|
||||
} else {
|
||||
setElapsedTime(`${seconds}s`)
|
||||
}
|
||||
}
|
||||
|
||||
updateElapsed()
|
||||
const interval = setInterval(updateElapsed, 1000)
|
||||
return () => clearInterval(interval)
|
||||
}, [pipelineState?.started_at, pipelineState?.status])
|
||||
|
||||
return {
|
||||
// Tab state
|
||||
activeTab,
|
||||
setActiveTab,
|
||||
|
||||
// Collection status
|
||||
collectionStatus,
|
||||
loading,
|
||||
fetchStatus,
|
||||
|
||||
// Search
|
||||
searchQuery,
|
||||
setSearchQuery,
|
||||
searchResults,
|
||||
searching,
|
||||
selectedRegulations,
|
||||
setSelectedRegulations,
|
||||
handleSearch,
|
||||
|
||||
// Ingestion
|
||||
ingestionRunning,
|
||||
ingestionLog,
|
||||
triggerIngestion,
|
||||
|
||||
// Pipeline
|
||||
pipelineState,
|
||||
pipelineLoading,
|
||||
pipelineStarting,
|
||||
autoRefresh,
|
||||
setAutoRefresh,
|
||||
elapsedTime,
|
||||
fetchPipeline,
|
||||
handleStartPipeline,
|
||||
|
||||
// Regulation expansion
|
||||
expandedRegulation,
|
||||
setExpandedRegulation,
|
||||
expandedDocTypes,
|
||||
setExpandedDocTypes,
|
||||
expandedMatrixDoc,
|
||||
setExpandedMatrixDoc,
|
||||
|
||||
// DSFA
|
||||
dsfaSources,
|
||||
dsfaStatus,
|
||||
dsfaLoading,
|
||||
regulationCategory,
|
||||
setRegulationCategory,
|
||||
expandedDsfaSource,
|
||||
setExpandedDsfaSource,
|
||||
fetchDsfaStatus,
|
||||
|
||||
// Data tab
|
||||
customDocuments,
|
||||
uploadFile,
|
||||
setUploadFile,
|
||||
uploadTitle,
|
||||
setUploadTitle,
|
||||
uploadCode,
|
||||
setUploadCode,
|
||||
uploading,
|
||||
handleUpload,
|
||||
linkUrl,
|
||||
setLinkUrl,
|
||||
linkTitle,
|
||||
setLinkTitle,
|
||||
linkCode,
|
||||
setLinkCode,
|
||||
addingLink,
|
||||
handleAddLink,
|
||||
handleDeleteDocument,
|
||||
fetchCustomDocuments,
|
||||
|
||||
// Helpers
|
||||
getRegulationChunks,
|
||||
getTotalChunks,
|
||||
}
|
||||
}
|
||||
|
||||
// Derived return type of the hook, so tab components can type their props
// without re-declaring the (large) state/handler shape by hand.
export type UseRAGPageReturn = ReturnType<typeof useRAGPage>
|
||||
Reference in New Issue
Block a user