Files
breakpilot-lehrer/admin-lehrer/app/api/sdk/v1/crawler/[[...path]]/route.ts
Benjamin Boenisch 1bd1a0002a feat: Sync Document Crawler frontend page and API proxy (Phase 1.4)
Synced from admin-v2: 4-tab crawler page and catch-all API proxy route.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-13 20:35:29 +01:00

115 lines
3.0 KiB
TypeScript

/**
* Document Crawler API Proxy - Catch-all route
* Proxies all /api/sdk/v1/crawler/* requests to document-crawler service (port 8098)
*/
import { NextRequest, NextResponse } from 'next/server'
// Base URL of the document-crawler backend; CRAWLER_API_URL env var overrides
// the default docker-internal hostname (service "document-crawler", port 8098).
const CRAWLER_BACKEND_URL = process.env.CRAWLER_API_URL || 'http://document-crawler:8098'
/**
 * Proxies one incoming request to the document-crawler backend.
 *
 * Rebuilds the target URL from the catch-all path segments plus the original
 * query string, forwards auth/tenant headers and (for body-carrying methods)
 * the JSON body, then relays the backend response.
 *
 * @param request      Incoming Next.js request.
 * @param pathSegments Catch-all `[[...path]]` segments (undefined for the root).
 * @param method       HTTP method to use against the backend.
 * @returns A NextResponse mirroring the backend's status and payload, or a
 *          503 JSON error if the backend is unreachable or times out.
 */
async function proxyRequest(
  request: NextRequest,
  pathSegments: string[] | undefined,
  method: string
) {
  const pathStr = pathSegments?.join('/') || ''
  const searchParams = request.nextUrl.searchParams.toString()
  const basePath = `${CRAWLER_BACKEND_URL}/api/v1/crawler`
  // Avoid a trailing slash when there is no sub-path; append query verbatim.
  const url =
    (pathStr ? `${basePath}/${pathStr}` : basePath) +
    (searchParams ? `?${searchParams}` : '')
  try {
    const headers: HeadersInit = {
      'Content-Type': 'application/json',
    }
    // Forward only auth/tenant context headers; everything else is stripped.
    const headerNames = ['authorization', 'x-tenant-id', 'x-user-id', 'x-namespace-id', 'x-tenant-slug']
    for (const name of headerNames) {
      const value = request.headers.get(name)
      if (value) {
        headers[name] = value
      }
    }
    const fetchOptions: RequestInit = {
      method,
      headers,
      signal: AbortSignal.timeout(30000), // abort if the backend hangs > 30s
    }
    // Forward body for non-GET/DELETE requests (JSON only).
    if (method !== 'GET' && method !== 'DELETE') {
      try {
        const body = await request.json()
        fetchOptions.body = JSON.stringify(body)
      } catch {
        // No body or non-JSON body — forward the request without one.
      }
    }
    const response = await fetch(url, fetchOptions)
    if (!response.ok) {
      const errorText = await response.text()
      // FIX: guard against JSON.parse yielding a non-object (string/number),
      // which the old code would spread into garbage numeric keys.
      let errorPayload: Record<string, unknown>
      try {
        const parsed: unknown = JSON.parse(errorText)
        errorPayload =
          typeof parsed === 'object' && parsed !== null
            ? (parsed as Record<string, unknown>)
            : { error: errorText }
      } catch {
        errorPayload = { error: errorText }
      }
      return NextResponse.json(
        { error: `Backend Error: ${response.status}`, ...errorPayload },
        { status: response.status }
      )
    }
    // Handle 204 No Content
    if (response.status === 204) {
      return new NextResponse(null, { status: 204 })
    }
    // FIX: a 2xx response with an empty or non-JSON body previously crashed
    // `response.json()` and was misreported as a 503 connection failure.
    // Also preserve the backend's success status (201/202) instead of
    // collapsing everything to 200.
    const text = await response.text()
    if (!text) {
      return new NextResponse(null, { status: response.status })
    }
    try {
      return NextResponse.json(JSON.parse(text), { status: response.status })
    } catch {
      // Non-JSON success body: relay it verbatim with the original content type.
      return new NextResponse(text, {
        status: response.status,
        headers: { 'Content-Type': response.headers.get('content-type') ?? 'text/plain' },
      })
    }
  } catch (error) {
    console.error('Document Crawler API proxy error:', error)
    return NextResponse.json(
      { error: 'Verbindung zum Document Crawler Backend fehlgeschlagen' },
      { status: 503 }
    )
  }
}
/** Handles GET /api/sdk/v1/crawler/[[...path]] by delegating to the proxy. */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const resolved = await params
  return proxyRequest(request, resolved.path, 'GET')
}
/** Handles POST /api/sdk/v1/crawler/[[...path]] by delegating to the proxy. */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const resolved = await params
  return proxyRequest(request, resolved.path, 'POST')
}
/** Handles PUT /api/sdk/v1/crawler/[[...path]] by delegating to the proxy. */
export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const resolved = await params
  return proxyRequest(request, resolved.path, 'PUT')
}
/** Handles DELETE /api/sdk/v1/crawler/[[...path]] by delegating to the proxy. */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const resolved = await params
  return proxyRequest(request, resolved.path, 'DELETE')
}