A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The earlier partial restore attempt
(660295e2) recovered only some of the missing files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
164 lines · 4.6 KiB · TypeScript
/**
 * API Proxy for Uni-Crawler (edu-search-service Orchestrator)
 *
 * Proxies requests to the edu-search-service orchestrator API
 * so that the browser doesn't need direct access to the internal service
 */

import { NextRequest, NextResponse } from 'next/server'

// Use EDU_SEARCH_URL environment variable - internal Docker URL
const EDU_SEARCH_URL = process.env.EDU_SEARCH_URL || 'http://localhost:8086'
const EDU_SEARCH_API_KEY = process.env.EDU_SEARCH_API_KEY || 'dev-key'
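
// Example environment configuration (illustrative values only; the Docker service
// name and port are assumptions, not taken from this repo's deployment files):
//   EDU_SEARCH_URL=http://edu-search-service:8086
//   EDU_SEARCH_API_KEY=<key issued for the orchestrator>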

// GET: Fetch status, queue, or universities
export async function GET(request: NextRequest) {
  const searchParams = request.nextUrl.searchParams
  const action = searchParams.get('action')

  try {
    let endpoint = ''
    let requiresAuth = true

    switch (action) {
      case 'status':
        endpoint = '/v1/crawl/status'
        break
      case 'queue':
        endpoint = '/v1/crawl/queue'
        break
      case 'universities':
        endpoint = '/api/v1/universities'
        requiresAuth = false
        break
      default:
        return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
    }

    // Mutable header map (Record<string, string>) so the Authorization
    // header can be added conditionally below
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    }
    if (requiresAuth) {
      headers['Authorization'] = `Bearer ${EDU_SEARCH_API_KEY}`
    }

    const response = await fetch(`${EDU_SEARCH_URL}${endpoint}`, { headers })

    if (!response.ok) {
      const errorText = await response.text()
      return NextResponse.json(
        { error: `Backend error: ${response.status}`, details: errorText },
        { status: response.status }
      )
    }

    const data = await response.json()
    return NextResponse.json(data)
  } catch (error) {
    console.error('uni-crawler API GET error:', error)
    return NextResponse.json(
      { error: 'Failed to connect to edu-search-service' },
      { status: 503 }
    )
  }
}

// POST: Start/stop orchestrator, add to queue, etc.
export async function POST(request: NextRequest) {
  const searchParams = request.nextUrl.searchParams
  const action = searchParams.get('action')

  try {
    let endpoint = ''
    let method = 'POST'
    let body = null

    switch (action) {
      case 'start':
        endpoint = '/v1/crawl/start'
        break
      case 'stop':
        endpoint = '/v1/crawl/stop'
        break
      case 'queue':
        endpoint = '/v1/crawl/queue'
        body = await request.json()
        break
      case 'pause': {
        const pauseId = searchParams.get('university_id')
        endpoint = `/v1/crawl/queue/${pauseId}/pause`
        break
      }
      case 'resume': {
        const resumeId = searchParams.get('university_id')
        endpoint = `/v1/crawl/queue/${resumeId}/resume`
        break
      }
      default:
        return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
    }

    const response = await fetch(`${EDU_SEARCH_URL}${endpoint}`, {
      method,
      headers: {
        'Authorization': `Bearer ${EDU_SEARCH_API_KEY}`,
        'Content-Type': 'application/json',
      },
      body: body ? JSON.stringify(body) : undefined,
    })

    if (!response.ok) {
      const errorText = await response.text()
      let errorData = { error: errorText }
      try {
        errorData = JSON.parse(errorText)
      } catch {}
      return NextResponse.json(errorData, { status: response.status })
    }

    const data = await response.json()
    return NextResponse.json(data)
  } catch (error) {
    console.error('uni-crawler API POST error:', error)
    return NextResponse.json(
      { error: 'Failed to connect to edu-search-service' },
      { status: 503 }
    )
  }
}
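
// Example requests the POST handler above accepts (illustrative; the action and
// university_id parameters come from the switch above, but the /api/uni-crawler
// mount path is an assumption, since the route file's location is not shown here):
//   POST /api/uni-crawler?action=start
//   POST /api/uni-crawler?action=stop
//   POST /api/uni-crawler?action=queue                        (JSON body forwarded as-is)
//   POST /api/uni-crawler?action=pause&university_id=<id>
//   POST /api/uni-crawler?action=resume&university_id=<id>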

// DELETE: Remove from queue
export async function DELETE(request: NextRequest) {
  const searchParams = request.nextUrl.searchParams
  const universityId = searchParams.get('university_id')

  if (!universityId) {
    return NextResponse.json({ error: 'university_id required' }, { status: 400 })
  }

  try {
    const response = await fetch(`${EDU_SEARCH_URL}/v1/crawl/queue/${universityId}`, {
      method: 'DELETE',
      headers: {
        'Authorization': `Bearer ${EDU_SEARCH_API_KEY}`,
        'Content-Type': 'application/json',
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      return NextResponse.json(
        { error: `Backend error: ${response.status}`, details: errorText },
        { status: response.status }
      )
    }

    const data = await response.json()
    return NextResponse.json(data)
  } catch (error) {
    console.error('uni-crawler API DELETE error:', error)
    return NextResponse.json(
      { error: 'Failed to connect to edu-search-service' },
      { status: 503 }
    )
  }
}
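
For reference, a minimal client-side sketch of how a dashboard page might call this proxy from the browser. The mount path /api/uni-crawler is an assumption (the route file's location under app/api/ is not shown above), and the response payloads are left as unknown because the orchestrator's schemas are not part of this file.

// Hypothetical client helpers for the proxy route above.
// Assumes the route is served at /api/uni-crawler; adjust to the actual app/api path.

export async function fetchCrawlerStatus(): Promise<unknown> {
  const res = await fetch('/api/uni-crawler?action=status')
  if (!res.ok) {
    throw new Error(`Crawler status request failed with ${res.status}`)
  }
  return res.json()
}

export async function removeFromQueue(universityId: string): Promise<unknown> {
  // The proxy expects university_id as a query parameter, mirroring the DELETE handler above
  const res = await fetch(
    `/api/uni-crawler?university_id=${encodeURIComponent(universityId)}`,
    { method: 'DELETE' }
  )
  if (!res.ok) {
    throw new Error(`Queue removal failed with ${res.status}`)
  }
  return res.json()
}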