Add Next.js pages for Academy, Whistleblower, Incidents, Document Crawler, DSB Portal, Industry Templates, Multi-Tenant and SSO. Add API proxy routes and TypeScript SDK client libraries. Add server binary to .gitignore.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
115 lines · 3.0 KiB · TypeScript
/**
 * Document Crawler API Proxy - Catch-all route
 * Proxies all /api/sdk/v1/crawler/* requests to the document-crawler service (port 8098)
 */

import { NextRequest, NextResponse } from 'next/server'

const CRAWLER_BACKEND_URL = process.env.CRAWLER_API_URL || 'http://document-crawler:8098'
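// The default targets the Docker-internal service hostname; set CRAWLER_API_URL
// to reach the backend elsewhere (e.g. http://localhost:8098 during local development).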

async function proxyRequest(
  request: NextRequest,
  pathSegments: string[] | undefined,
  method: string
) {
  const pathStr = pathSegments?.join('/') || ''
  const searchParams = request.nextUrl.searchParams.toString()
  const basePath = `${CRAWLER_BACKEND_URL}/api/v1/crawler`
  const url = pathStr
    ? `${basePath}/${pathStr}${searchParams ? `?${searchParams}` : ''}`
    : `${basePath}${searchParams ? `?${searchParams}` : ''}`
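  // Example mapping (the `jobs` segment is a hypothetical crawler endpoint):
  //   GET /api/sdk/v1/crawler/jobs?status=active
  //   -> http://document-crawler:8098/api/v1/crawler/jobs?status=active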

  try {
    // Record<string, string> (rather than HeadersInit) so entries can be
    // assigned by string key in the loop below without a type error
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    }

    // Forward all relevant headers
    const headerNames = ['authorization', 'x-tenant-id', 'x-user-id', 'x-namespace-id', 'x-tenant-slug']
    for (const name of headerNames) {
      const value = request.headers.get(name)
      if (value) {
        headers[name] = value
      }
    }
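    // Forwarding x-tenant-*/x-user-id presumably preserves the multi-tenant and
    // SSO context (added elsewhere in this commit) for the backend service.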

    const fetchOptions: RequestInit = {
      method,
      headers,
      signal: AbortSignal.timeout(30000),
    }
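    // AbortSignal.timeout caps the upstream call at 30 s so a hung backend
    // cannot stall the route handler indefinitely.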

    // Forward the JSON body for methods other than GET and DELETE
    if (method !== 'GET' && method !== 'DELETE') {
      try {
        const body = await request.json()
        fetchOptions.body = JSON.stringify(body)
      } catch {
        // No body or non-JSON body
      }
    }

    const response = await fetch(url, fetchOptions)
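
    // Mirror upstream failures to the caller: keep the backend's status code
    // and surface its error payload, parsed as JSON when possible.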
    if (!response.ok) {
      const errorText = await response.text()
      let errorJson
      try {
        errorJson = JSON.parse(errorText)
      } catch {
        errorJson = { error: errorText }
      }
      return NextResponse.json(
        { error: `Backend Error: ${response.status}`, ...errorJson },
        { status: response.status }
      )
    }

    // Handle 204 No Content
    if (response.status === 204) {
      return new NextResponse(null, { status: 204 })
    }

    const data = await response.json()
    return NextResponse.json(data)
  } catch (error) {
    console.error('Document Crawler API proxy error:', error)
    return NextResponse.json(
      { error: 'Connection to the Document Crawler backend failed' },
      { status: 503 }
    )
  }
}
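
// Next.js 15 route handlers receive `params` as a Promise, hence the `await`
// before reading the catch-all `path` segments in each handler below.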
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const { path } = await params
  return proxyRequest(request, path, 'GET')
}

export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const { path } = await params
  return proxyRequest(request, path, 'POST')
}

export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const { path } = await params
  return proxyRequest(request, path, 'PUT')
}

export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ path?: string[] }> }
) {
  const { path } = await params
  return proxyRequest(request, path, 'DELETE')
}
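
For reference, the route can be exercised from client code with a plain fetch against the proxied path. The sketch below is hypothetical: the `jobs` endpoint and request payload are illustrative placeholders, not part of this commit; the real endpoints are defined by the document-crawler service.

// Hypothetical usage sketch - the /jobs sub-path and body shape are assumptions.
async function startCrawlJob(token: string, tenantId: string) {
  const res = await fetch('/api/sdk/v1/crawler/jobs', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      authorization: `Bearer ${token}`,
      'x-tenant-id': tenantId,
    },
    body: JSON.stringify({ url: 'https://example.com', depth: 2 }),
  })
  if (!res.ok) {
    throw new Error(`Crawler request failed: ${res.status}`)
  }
  return res.json()
}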