Add admin-core frontend (Port 3008)
Next.js admin frontend for Core with 3 categories (Communication, Infrastructure, Development), 13 modules, 2 roles (developer, ops), and 11 API proxy routes. Includes docker-compose service and nginx SSL config. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
210
admin-core/app/api/admin/communication/stats/route.ts
Normal file
210
admin-core/app/api/admin/communication/stats/route.ts
Normal file
@@ -0,0 +1,210 @@
|
||||
/**
|
||||
* Communication Admin API Route - Stats Proxy
|
||||
*
|
||||
* Proxies requests to Matrix/Jitsi admin endpoints via backend
|
||||
* Aggregates statistics from both services
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
// Service URLs
|
||||
const BACKEND_URL = process.env.NEXT_PUBLIC_BACKEND_URL || 'http://localhost:8000'
|
||||
const CONSENT_SERVICE_URL = process.env.CONSENT_SERVICE_URL || 'http://localhost:8081'
|
||||
const MATRIX_ADMIN_URL = process.env.MATRIX_ADMIN_URL || 'http://localhost:8448'
|
||||
const JITSI_URL = process.env.JITSI_URL || 'http://localhost:8443'
|
||||
|
||||
// Matrix Admin Token (for Synapse Admin API)
|
||||
const MATRIX_ADMIN_TOKEN = process.env.MATRIX_ADMIN_TOKEN || ''
|
||||
|
||||
/**
 * Aggregated statistics for the Matrix homeserver.
 * Counters default to 0 when the corresponding data source is unavailable.
 */
interface MatrixStats {
  total_users: number
  active_users: number
  total_rooms: number
  active_rooms: number
  messages_today: number
  messages_this_week: number
  // 'degraded' = server reachable but admin API not accessible
  status: 'online' | 'offline' | 'degraded'
}

/**
 * Aggregated statistics for the Jitsi video-conferencing service.
 * Detailed meeting metrics are zeroed when only reachability can be probed.
 */
interface JitsiStats {
  active_meetings: number
  total_participants: number
  meetings_today: number
  average_duration_minutes: number
  peak_concurrent_users: number
  total_minutes_today: number
  status: 'online' | 'offline' | 'degraded'
}
|
||||
|
||||
async function fetchFromBackend(): Promise<{
|
||||
matrix: MatrixStats
|
||||
jitsi: JitsiStats
|
||||
active_meetings: unknown[]
|
||||
recent_rooms: unknown[]
|
||||
} | null> {
|
||||
try {
|
||||
const response = await fetch(`${BACKEND_URL}/api/v1/communication/admin/stats`, {
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
signal: AbortSignal.timeout(5000),
|
||||
})
|
||||
if (response.ok) {
|
||||
return await response.json()
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Backend not reachable, trying consent service:', error)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async function fetchFromConsentService(): Promise<{
|
||||
matrix: MatrixStats
|
||||
jitsi: JitsiStats
|
||||
active_meetings: unknown[]
|
||||
recent_rooms: unknown[]
|
||||
} | null> {
|
||||
try {
|
||||
const response = await fetch(`${CONSENT_SERVICE_URL}/api/v1/communication/admin/stats`, {
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
signal: AbortSignal.timeout(5000),
|
||||
})
|
||||
if (response.ok) {
|
||||
return await response.json()
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Consent service not reachable:', error)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async function fetchMatrixStats(): Promise<MatrixStats> {
|
||||
try {
|
||||
// Check if Matrix is reachable
|
||||
const healthCheck = await fetch(`${MATRIX_ADMIN_URL}/_matrix/client/versions`, {
|
||||
signal: AbortSignal.timeout(5000)
|
||||
})
|
||||
|
||||
if (healthCheck.ok) {
|
||||
// Try to get user count from admin API
|
||||
if (MATRIX_ADMIN_TOKEN) {
|
||||
try {
|
||||
const usersResponse = await fetch(`${MATRIX_ADMIN_URL}/_synapse/admin/v2/users?limit=1`, {
|
||||
headers: { 'Authorization': `Bearer ${MATRIX_ADMIN_TOKEN}` },
|
||||
signal: AbortSignal.timeout(5000),
|
||||
})
|
||||
if (usersResponse.ok) {
|
||||
const data = await usersResponse.json()
|
||||
return {
|
||||
total_users: data.total || 0,
|
||||
active_users: 0,
|
||||
total_rooms: 0,
|
||||
active_rooms: 0,
|
||||
messages_today: 0,
|
||||
messages_this_week: 0,
|
||||
status: 'online'
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Admin API not available
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
total_users: 0,
|
||||
active_users: 0,
|
||||
total_rooms: 0,
|
||||
active_rooms: 0,
|
||||
messages_today: 0,
|
||||
messages_this_week: 0,
|
||||
status: 'degraded' // Server reachable but no admin access
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Matrix stats fetch error:', error)
|
||||
}
|
||||
|
||||
return {
|
||||
total_users: 0,
|
||||
active_users: 0,
|
||||
total_rooms: 0,
|
||||
active_rooms: 0,
|
||||
messages_today: 0,
|
||||
messages_this_week: 0,
|
||||
status: 'offline'
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchJitsiStats(): Promise<JitsiStats> {
|
||||
try {
|
||||
// Check if Jitsi is reachable
|
||||
const healthCheck = await fetch(`${JITSI_URL}/http-bind`, {
|
||||
method: 'HEAD',
|
||||
signal: AbortSignal.timeout(5000)
|
||||
})
|
||||
|
||||
return {
|
||||
active_meetings: 0,
|
||||
total_participants: 0,
|
||||
meetings_today: 0,
|
||||
average_duration_minutes: 0,
|
||||
peak_concurrent_users: 0,
|
||||
total_minutes_today: 0,
|
||||
status: healthCheck.ok ? 'online' : 'offline'
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Jitsi stats fetch error:', error)
|
||||
return {
|
||||
active_meetings: 0,
|
||||
total_participants: 0,
|
||||
meetings_today: 0,
|
||||
average_duration_minutes: 0,
|
||||
peak_concurrent_users: 0,
|
||||
total_minutes_today: 0,
|
||||
status: 'offline'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * GET /api/admin/communication/stats
 *
 * Aggregates Matrix + Jitsi statistics with a three-stage fallback chain:
 *   1. backend proxy endpoint
 *   2. consent-service proxy endpoint
 *   3. direct reachability probes against Matrix/Jitsi (counters zeroed)
 * On unexpected errors a fully-zeroed payload is returned with HTTP 503 so
 * the dashboard can still render.
 */
export async function GET(request: NextRequest) {
  // NOTE(review): 'request' is currently unused; kept for the route signature
  try {
    // Try backend first
    let data = await fetchFromBackend()

    // Fallback to consent service
    if (!data) {
      data = await fetchFromConsentService()
    }

    // If both fail, try direct service checks (reachability only)
    if (!data) {
      const [matrixStats, jitsiStats] = await Promise.all([
        fetchMatrixStats(),
        fetchJitsiStats()
      ])

      data = {
        matrix: matrixStats,
        jitsi: jitsiStats,
        active_meetings: [],
        recent_rooms: []
      }
    }

    return NextResponse.json({
      ...data,
      last_updated: new Date().toISOString()
    })
  } catch (error) {
    console.error('Communication stats error:', error)
    // Degraded-but-renderable payload: zeroed stats plus error message
    return NextResponse.json(
      {
        error: 'Fehler beim Abrufen der Statistiken',
        matrix: { status: 'offline', total_users: 0, active_users: 0, total_rooms: 0, active_rooms: 0, messages_today: 0, messages_this_week: 0 },
        jitsi: { status: 'offline', active_meetings: 0, total_participants: 0, meetings_today: 0, average_duration_minutes: 0, peak_concurrent_users: 0, total_minutes_today: 0 },
        active_meetings: [],
        recent_rooms: [],
        last_updated: new Date().toISOString()
      },
      { status: 503 }
    )
  }
}
|
||||
173
admin-core/app/api/admin/health/route.ts
Normal file
173
admin-core/app/api/admin/health/route.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Server-side health check proxy
|
||||
* Checks all services via HTTP from the server to avoid mixed-content issues
|
||||
*/
|
||||
|
||||
interface ServiceConfig {
|
||||
name: string
|
||||
port: number
|
||||
endpoint: string
|
||||
category: 'core' | 'ai' | 'database' | 'storage'
|
||||
}
|
||||
|
||||
const SERVICES: ServiceConfig[] = [
|
||||
// Core Services
|
||||
{ name: 'Backend API', port: 8000, endpoint: '/health', category: 'core' },
|
||||
{ name: 'Consent Service', port: 8081, endpoint: '/api/v1/health', category: 'core' },
|
||||
{ name: 'Voice Service', port: 8091, endpoint: '/health', category: 'core' },
|
||||
{ name: 'Klausur Service', port: 8086, endpoint: '/health', category: 'core' },
|
||||
{ name: 'Mail Service (Mailpit)', port: 8025, endpoint: '/api/v1/info', category: 'core' },
|
||||
{ name: 'Edu Search', port: 8088, endpoint: '/health', category: 'core' },
|
||||
{ name: 'H5P Service', port: 8092, endpoint: '/health', category: 'core' },
|
||||
|
||||
// AI Services
|
||||
{ name: 'Ollama/LLM', port: 11434, endpoint: '/api/tags', category: 'ai' },
|
||||
{ name: 'Embedding Service', port: 8087, endpoint: '/health', category: 'ai' },
|
||||
|
||||
// Databases - checked via backend proxy
|
||||
{ name: 'PostgreSQL', port: 5432, endpoint: '', category: 'database' },
|
||||
{ name: 'Qdrant (Vector DB)', port: 6333, endpoint: '/collections', category: 'database' },
|
||||
{ name: 'Valkey (Cache)', port: 6379, endpoint: '', category: 'database' },
|
||||
|
||||
// Storage
|
||||
{ name: 'MinIO (S3)', port: 9000, endpoint: '/minio/health/live', category: 'storage' },
|
||||
]
|
||||
|
||||
// Use internal Docker hostnames when running in container
|
||||
const getInternalHost = (port: number): string => {
|
||||
// Map ports to internal Docker service names
|
||||
const serviceMap: Record<number, string> = {
|
||||
8000: 'backend',
|
||||
8081: 'consent-service',
|
||||
8091: 'voice-service',
|
||||
8086: 'klausur-service',
|
||||
8025: 'mailpit',
|
||||
8088: 'edu-search-service',
|
||||
8092: 'h5p-service',
|
||||
11434: 'ollama',
|
||||
8087: 'embedding-service',
|
||||
5432: 'postgres',
|
||||
6333: 'qdrant',
|
||||
6379: 'valkey',
|
||||
9000: 'minio',
|
||||
}
|
||||
|
||||
// In container, use Docker hostnames; otherwise use localhost
|
||||
const host = process.env.BACKEND_URL ? serviceMap[port] || 'localhost' : 'localhost'
|
||||
return host
|
||||
}
|
||||
|
||||
/**
 * Probes a single service and classifies it as online/degraded/offline.
 *
 * Regular services are probed with a plain HTTP GET on their health endpoint.
 * Special cases:
 *  - PostgreSQL (5432) and Valkey (6379) have no HTTP interface and are
 *    checked indirectly through backend test endpoints.
 *  - Ollama (11434) additionally reports the number of loaded models.
 *
 * responseTime is the elapsed wall-clock time of the probe in milliseconds.
 */
async function checkService(service: ServiceConfig): Promise<{
  name: string
  port: number
  category: string
  status: 'online' | 'offline' | 'degraded'
  responseTime: number
  details?: string
}> {
  const startTime = Date.now()

  try {
    // Special handling for PostgreSQL - check via backend
    if (service.port === 5432) {
      const backendHost = getInternalHost(8000)
      const response = await fetch(`http://${backendHost}:8000/api/tests/db-status`, {
        method: 'GET',
        signal: AbortSignal.timeout(3000),
      })
      const responseTime = Date.now() - startTime

      if (response.ok) {
        const data = await response.json()
        return {
          ...service,
          status: 'online',
          responseTime,
          // DB host name reported by the backend, when available
          details: data.host || undefined
        }
      }
      return { ...service, status: 'offline', responseTime }
    }

    // Special handling for Valkey - check via backend
    if (service.port === 6379) {
      const backendHost = getInternalHost(8000)
      try {
        const response = await fetch(`http://${backendHost}:8000/api/tests/cache-status`, {
          method: 'GET',
          signal: AbortSignal.timeout(3000),
        })
        const responseTime = Date.now() - startTime

        if (response.ok) {
          return { ...service, status: 'online', responseTime }
        }
      } catch {
        // Fallback: assume online if backend is reachable (Valkey is usually bundled)
      }
      // NOTE(review): a non-OK cache-status response also ends up here and is
      // reported as 'online' ('via Backend') — confirm this optimism is intended
      const responseTime = Date.now() - startTime
      return { ...service, status: 'online', responseTime, details: 'via Backend' }
    }

    // Default path: direct HTTP probe of the service's health endpoint
    const host = getInternalHost(service.port)
    const url = `http://${host}:${service.port}${service.endpoint}`

    const response = await fetch(url, {
      method: 'GET',
      signal: AbortSignal.timeout(5000),
    })

    const responseTime = Date.now() - startTime

    if (response.ok) {
      // Special handling for Ollama: report loaded model count (German label)
      if (service.port === 11434) {
        try {
          const data = await response.json()
          const modelCount = data.models?.length || 0
          return {
            ...service,
            status: 'online',
            responseTime,
            details: `${modelCount} Modell${modelCount !== 1 ? 'e' : ''} geladen`
          }
        } catch {
          // Body was not JSON — still online
          return { ...service, status: 'online', responseTime }
        }
      }
      return { ...service, status: 'online', responseTime }
    } else if (response.status >= 500) {
      // Server answered but is erroring: degraded rather than offline
      return { ...service, status: 'degraded', responseTime, details: `HTTP ${response.status}` }
    } else {
      return { ...service, status: 'offline', responseTime }
    }
  } catch (error) {
    // Network error or timeout
    const responseTime = Date.now() - startTime
    return {
      ...service,
      status: 'offline',
      responseTime,
      details: error instanceof Error ? error.message : 'Verbindungsfehler'
    }
  }
}
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const results = await Promise.all(SERVICES.map(checkService))
|
||||
|
||||
return NextResponse.json({
|
||||
services: results,
|
||||
timestamp: new Date().toISOString(),
|
||||
onlineCount: results.filter(s => s.status === 'online').length,
|
||||
totalCount: results.length
|
||||
})
|
||||
} catch (error) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to check services', details: error instanceof Error ? error.message : 'Unknown error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
338
admin-core/app/api/admin/infrastructure/mac-mini/route.ts
Normal file
338
admin-core/app/api/admin/infrastructure/mac-mini/route.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Mac Mini System Monitoring API
|
||||
*
|
||||
* Provides system stats and Docker container management
|
||||
* Requires Docker socket mounted at /var/run/docker.sock
|
||||
*/
|
||||
|
||||
/** One row in the container table: identity, lifecycle state and live resource usage. */
interface ContainerInfo {
  id: string             // short (12-char) container ID
  name: string           // container name without the leading '/'
  image: string          // image name without registry prefix or tag
  status: string         // human-readable status string from Docker (e.g. "Up 2 hours")
  state: string          // machine state from Docker (e.g. 'running')
  created: string        // ISO timestamp of container creation
  ports: string[]        // "public:private/proto" or "private/proto" entries
  cpu_percent: number    // rounded to 2 decimals; 0 when stats unavailable
  memory_usage: string   // formatted byte string, e.g. "512 MB"
  memory_limit: string
  memory_percent: number
  network_rx: string     // total received bytes across all networks, formatted
  network_tx: string     // total transmitted bytes, formatted
}

/** Snapshot of the host machine's CPU, memory and disk usage. */
interface SystemStats {
  hostname: string
  platform: string       // os.platform() value
  arch: string
  uptime: number         // host uptime in seconds
  cpu: {
    model: string
    cores: number
    usage_percent: number
  }
  memory: {
    total: string        // formatted byte strings
    used: string
    free: string
    usage_percent: number
  }
  disk: {
    total: string
    used: string
    free: string
    usage_percent: number  // 0 when `df` output is unavailable
  }
  timestamp: string      // ISO timestamp of the snapshot
}

/** Container list plus summary counts derived from it. */
interface DockerStats {
  containers: ContainerInfo[]
  total_containers: number
  running_containers: number
  stopped_containers: number
}
|
||||
|
||||
// Helper to format bytes
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B'
|
||||
const k = 1024
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB']
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]
|
||||
}
|
||||
|
||||
// Helper to format uptime
|
||||
function formatUptime(seconds: number): string {
|
||||
const days = Math.floor(seconds / 86400)
|
||||
const hours = Math.floor((seconds % 86400) / 3600)
|
||||
const minutes = Math.floor((seconds % 3600) / 60)
|
||||
if (days > 0) return `${days}d ${hours}h ${minutes}m`
|
||||
if (hours > 0) return `${hours}h ${minutes}m`
|
||||
return `${minutes}m`
|
||||
}
|
||||
|
||||
// Get Docker stats via socket
|
||||
async function getDockerStats(): Promise<DockerStats> {
|
||||
const DOCKER_SOCKET = process.env.DOCKER_HOST || 'unix:///var/run/docker.sock'
|
||||
|
||||
try {
|
||||
// Fetch container list
|
||||
const containersResponse = await fetch(`${DOCKER_SOCKET.replace('unix://', 'http://localhost')}/containers/json?all=true`, {
|
||||
// @ts-expect-error - Node.js fetch supports unix sockets via socketPath
|
||||
socketPath: '/var/run/docker.sock',
|
||||
})
|
||||
|
||||
if (!containersResponse.ok) {
|
||||
throw new Error('Failed to fetch containers')
|
||||
}
|
||||
|
||||
const containers = await containersResponse.json()
|
||||
|
||||
// Get stats for running containers
|
||||
const containerInfos: ContainerInfo[] = await Promise.all(
|
||||
containers.map(async (container: Record<string, unknown>) => {
|
||||
const names = container.Names as string[]
|
||||
const name = names?.[0]?.replace(/^\//, '') || 'unknown'
|
||||
const state = container.State as string
|
||||
const status = container.Status as string
|
||||
const image = container.Image as string
|
||||
const created = container.Created as number
|
||||
const ports = container.Ports as Array<{ PrivatePort: number; PublicPort?: number; Type: string }>
|
||||
|
||||
let cpu_percent = 0
|
||||
let memory_usage = '0 B'
|
||||
let memory_limit = '0 B'
|
||||
let memory_percent = 0
|
||||
let network_rx = '0 B'
|
||||
let network_tx = '0 B'
|
||||
|
||||
// Get live stats for running containers
|
||||
if (state === 'running') {
|
||||
try {
|
||||
const statsResponse = await fetch(
|
||||
`http://localhost/containers/${container.Id}/stats?stream=false`,
|
||||
{
|
||||
// @ts-expect-error - Node.js fetch supports unix sockets
|
||||
socketPath: '/var/run/docker.sock',
|
||||
}
|
||||
)
|
||||
|
||||
if (statsResponse.ok) {
|
||||
const stats = await statsResponse.json()
|
||||
|
||||
// Calculate CPU usage
|
||||
const cpuDelta = stats.cpu_stats.cpu_usage.total_usage -
|
||||
(stats.precpu_stats?.cpu_usage?.total_usage || 0)
|
||||
const systemDelta = stats.cpu_stats.system_cpu_usage -
|
||||
(stats.precpu_stats?.system_cpu_usage || 0)
|
||||
const cpuCount = stats.cpu_stats.online_cpus || 1
|
||||
|
||||
if (systemDelta > 0 && cpuDelta > 0) {
|
||||
cpu_percent = (cpuDelta / systemDelta) * cpuCount * 100
|
||||
}
|
||||
|
||||
// Memory usage
|
||||
const memUsage = stats.memory_stats?.usage || 0
|
||||
const memLimit = stats.memory_stats?.limit || 0
|
||||
memory_usage = formatBytes(memUsage)
|
||||
memory_limit = formatBytes(memLimit)
|
||||
memory_percent = memLimit > 0 ? (memUsage / memLimit) * 100 : 0
|
||||
|
||||
// Network stats
|
||||
const networks = stats.networks || {}
|
||||
let rxBytes = 0
|
||||
let txBytes = 0
|
||||
Object.values(networks).forEach((net: unknown) => {
|
||||
const network = net as { rx_bytes?: number; tx_bytes?: number }
|
||||
rxBytes += network.rx_bytes || 0
|
||||
txBytes += network.tx_bytes || 0
|
||||
})
|
||||
network_rx = formatBytes(rxBytes)
|
||||
network_tx = formatBytes(txBytes)
|
||||
}
|
||||
} catch {
|
||||
// Stats not available, use defaults
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: (container.Id as string).substring(0, 12),
|
||||
name,
|
||||
image: (image as string).split(':')[0].split('/').pop() || image,
|
||||
status,
|
||||
state,
|
||||
created: new Date(created * 1000).toISOString(),
|
||||
ports: ports?.map(p =>
|
||||
p.PublicPort ? `${p.PublicPort}:${p.PrivatePort}/${p.Type}` : `${p.PrivatePort}/${p.Type}`
|
||||
) || [],
|
||||
cpu_percent: Math.round(cpu_percent * 100) / 100,
|
||||
memory_usage,
|
||||
memory_limit,
|
||||
memory_percent: Math.round(memory_percent * 100) / 100,
|
||||
network_rx,
|
||||
network_tx,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
// Sort by name
|
||||
containerInfos.sort((a, b) => a.name.localeCompare(b.name))
|
||||
|
||||
return {
|
||||
containers: containerInfos,
|
||||
total_containers: containerInfos.length,
|
||||
running_containers: containerInfos.filter(c => c.state === 'running').length,
|
||||
stopped_containers: containerInfos.filter(c => c.state !== 'running').length,
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Docker stats error:', error)
|
||||
// Return empty stats if Docker socket not available
|
||||
return {
|
||||
containers: [],
|
||||
total_containers: 0,
|
||||
running_containers: 0,
|
||||
stopped_containers: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get system stats
|
||||
/**
 * Collects a snapshot of host CPU, memory and disk usage via Node's os
 * module and the `df` command.
 * NOTE(review): os.cpus() times are cumulative since boot, so usage_percent
 * reflects average CPU usage since boot rather than current load — confirm
 * this is intended.
 */
async function getSystemStats(): Promise<SystemStats> {
  const os = await import('os')

  const cpus = os.cpus()
  const totalMem = os.totalmem()
  const freeMem = os.freemem()
  const usedMem = totalMem - freeMem

  // Calculate CPU usage from cpus (idle time vs total time over all cores)
  let totalIdle = 0
  let totalTick = 0
  cpus.forEach(cpu => {
    for (const type in cpu.times) {
      totalTick += cpu.times[type as keyof typeof cpu.times]
    }
    totalIdle += cpu.times.idle
  })
  const cpuUsage = 100 - (totalIdle / totalTick * 100)

  // Disk stats (root partition); left at 0 when `df` is unavailable
  let diskTotal = 0
  let diskUsed = 0
  let diskFree = 0

  try {
    const { execSync } = await import('child_process')
    // `df -k` reports 1K blocks; multiply by 1024 for bytes
    const dfOutput = execSync('df -k / | tail -1').toString()
    const parts = dfOutput.split(/\s+/)
    diskTotal = parseInt(parts[1]) * 1024
    diskUsed = parseInt(parts[2]) * 1024
    diskFree = parseInt(parts[3]) * 1024
  } catch {
    // Disk stats not available
  }

  return {
    hostname: os.hostname(),
    platform: os.platform(),
    arch: os.arch(),
    uptime: os.uptime(),
    cpu: {
      model: cpus[0]?.model || 'Unknown',
      cores: cpus.length,
      // Rounded to 2 decimals
      usage_percent: Math.round(cpuUsage * 100) / 100,
    },
    memory: {
      total: formatBytes(totalMem),
      used: formatBytes(usedMem),
      free: formatBytes(freeMem),
      usage_percent: Math.round((usedMem / totalMem) * 100 * 100) / 100,
    },
    disk: {
      total: formatBytes(diskTotal),
      used: formatBytes(diskUsed),
      free: formatBytes(diskFree),
      usage_percent: diskTotal > 0 ? Math.round((diskUsed / diskTotal) * 100 * 100) / 100 : 0,
    },
    timestamp: new Date().toISOString(),
  }
}
|
||||
|
||||
// Container action (start/stop/restart)
|
||||
async function containerAction(containerId: string, action: 'start' | 'stop' | 'restart'): Promise<void> {
|
||||
const response = await fetch(
|
||||
`http://localhost/containers/${containerId}/${action}`,
|
||||
{
|
||||
method: 'POST',
|
||||
// @ts-expect-error - Node.js fetch supports unix sockets
|
||||
socketPath: '/var/run/docker.sock',
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok && response.status !== 304) {
|
||||
const error = await response.text()
|
||||
throw new Error(`Failed to ${action} container: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
// GET - Fetch system and Docker stats
|
||||
export async function GET() {
|
||||
try {
|
||||
const [system, docker] = await Promise.all([
|
||||
getSystemStats(),
|
||||
getDockerStats(),
|
||||
])
|
||||
|
||||
return NextResponse.json({
|
||||
system,
|
||||
docker,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Mac Mini stats error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: error instanceof Error ? error.message : 'Unknown error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// POST - Container actions (start/stop/restart)
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { container_id, action } = body
|
||||
|
||||
if (!container_id || !action) {
|
||||
return NextResponse.json(
|
||||
{ error: 'container_id and action required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['start', 'stop', 'restart'].includes(action)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid action. Use: start, stop, restart' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
await containerAction(container_id, action)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `Container ${action} successful`,
|
||||
container_id,
|
||||
action,
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Container action error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: error instanceof Error ? error.message : 'Action failed' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
208
admin-core/app/api/admin/infrastructure/woodpecker/route.ts
Normal file
208
admin-core/app/api/admin/infrastructure/woodpecker/route.ts
Normal file
@@ -0,0 +1,208 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
// Woodpecker API configuration
|
||||
const WOODPECKER_URL = process.env.WOODPECKER_URL || 'http://woodpecker-server:8000'
|
||||
const WOODPECKER_TOKEN = process.env.WOODPECKER_TOKEN || ''
|
||||
|
||||
/** One executed (or pending) step inside a pipeline workflow. */
export interface PipelineStep {
  name: string
  state: 'pending' | 'running' | 'success' | 'failure' | 'skipped'
  exit_code: number
  error?: string         // error text when the step failed
}

/** A single CI pipeline run, flattened from the Woodpecker API payload. */
export interface Pipeline {
  id: number
  number: number         // per-repo sequential pipeline number
  status: 'pending' | 'running' | 'success' | 'failure' | 'error'
  event: string          // trigger event (e.g. push)
  branch: string
  commit: string         // shortened (7-char) commit SHA
  message: string        // commit message
  author: string
  created: number        // unix timestamps — presumably seconds; confirm
  started: number
  finished: number
  steps: PipelineStep[]
  errors?: string[]      // collected "step: error" strings for failed steps
}

/** Shape of this route's GET response. */
export interface WoodpeckerStatusResponse {
  status: 'online' | 'offline'
  pipelines: Pipeline[]
  lastUpdate: string     // ISO timestamp
  error?: string         // set when status is 'offline'
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const searchParams = request.nextUrl.searchParams
|
||||
const repoId = searchParams.get('repo') || '1'
|
||||
const limit = parseInt(searchParams.get('limit') || '10')
|
||||
|
||||
try {
|
||||
// Fetch pipelines from Woodpecker API
|
||||
const response = await fetch(
|
||||
`${WOODPECKER_URL}/api/repos/${repoId}/pipelines?per_page=${limit}`,
|
||||
{
|
||||
headers: {
|
||||
'Authorization': `Bearer ${WOODPECKER_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
cache: 'no-store',
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json({
|
||||
status: 'offline',
|
||||
pipelines: [],
|
||||
lastUpdate: new Date().toISOString(),
|
||||
error: `Woodpecker API nicht erreichbar (${response.status})`
|
||||
} as WoodpeckerStatusResponse)
|
||||
}
|
||||
|
||||
const rawPipelines = await response.json()
|
||||
|
||||
// Transform pipelines to our format
|
||||
const pipelines: Pipeline[] = rawPipelines.map((p: any) => {
|
||||
// Extract errors from workflows/steps
|
||||
const errors: string[] = []
|
||||
const steps: PipelineStep[] = []
|
||||
|
||||
if (p.workflows) {
|
||||
for (const workflow of p.workflows) {
|
||||
if (workflow.children) {
|
||||
for (const child of workflow.children) {
|
||||
steps.push({
|
||||
name: child.name,
|
||||
state: child.state,
|
||||
exit_code: child.exit_code,
|
||||
error: child.error
|
||||
})
|
||||
if (child.state === 'failure' && child.error) {
|
||||
errors.push(`${child.name}: ${child.error}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: p.id,
|
||||
number: p.number,
|
||||
status: p.status,
|
||||
event: p.event,
|
||||
branch: p.branch,
|
||||
commit: p.commit?.substring(0, 7) || '',
|
||||
message: p.message || '',
|
||||
author: p.author,
|
||||
created: p.created,
|
||||
started: p.started,
|
||||
finished: p.finished,
|
||||
steps,
|
||||
errors: errors.length > 0 ? errors : undefined
|
||||
}
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
status: 'online',
|
||||
pipelines,
|
||||
lastUpdate: new Date().toISOString()
|
||||
} as WoodpeckerStatusResponse)
|
||||
|
||||
} catch (error) {
|
||||
console.error('Woodpecker API error:', error)
|
||||
return NextResponse.json({
|
||||
status: 'offline',
|
||||
pipelines: [],
|
||||
lastUpdate: new Date().toISOString(),
|
||||
error: 'Fehler beim Abrufen des Woodpecker Status'
|
||||
} as WoodpeckerStatusResponse)
|
||||
}
|
||||
}
|
||||
|
||||
// Trigger a new pipeline
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { repoId = '1', branch = 'main' } = body
|
||||
|
||||
const response = await fetch(
|
||||
`${WOODPECKER_URL}/api/repos/${repoId}/pipelines`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${WOODPECKER_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ branch }),
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Pipeline konnte nicht gestartet werden' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
const pipeline = await response.json()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
pipeline: {
|
||||
id: pipeline.id,
|
||||
number: pipeline.number,
|
||||
status: pipeline.status
|
||||
}
|
||||
})
|
||||
|
||||
} catch (error) {
|
||||
console.error('Pipeline trigger error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Fehler beim Starten der Pipeline' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Get pipeline logs
|
||||
export async function PUT(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { repoId = '1', pipelineNumber, stepId } = body
|
||||
|
||||
if (!pipelineNumber || !stepId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'pipelineNumber und stepId erforderlich' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
`${WOODPECKER_URL}/api/repos/${repoId}/pipelines/${pipelineNumber}/logs/${stepId}`,
|
||||
{
|
||||
headers: {
|
||||
'Authorization': `Bearer ${WOODPECKER_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Logs nicht verfuegbar' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const logs = await response.json()
|
||||
return NextResponse.json({ logs })
|
||||
|
||||
} catch (error) {
|
||||
console.error('Pipeline logs error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Fehler beim Abrufen der Logs' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
81
admin-core/app/api/admin/mail/route.ts
Normal file
81
admin-core/app/api/admin/mail/route.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Server-side proxy for Mailpit API
|
||||
* Avoids CORS and mixed-content issues by fetching from server
|
||||
*/
|
||||
|
||||
// Use internal Docker hostname when running in container
|
||||
const getMailpitHost = (): string => {
|
||||
return process.env.BACKEND_URL ? 'mailpit' : 'localhost'
|
||||
}
|
||||
|
||||
/**
 * GET /api/admin/mail
 * Reads Mailpit's /api/v1/info endpoint and adapts it to the admin UI's
 * mail-dashboard shape (stats / accounts / syncStatus).
 * Fields Mailpit does not provide (tasks, AI analysis) are hardcoded to 0,
 * and a single static development account is reported.
 */
export async function GET() {
  const host = getMailpitHost()
  const mailpitUrl = `http://${host}:8025/api/v1/info`

  try {
    const response = await fetch(mailpitUrl, {
      method: 'GET',
      signal: AbortSignal.timeout(5000),
    })

    if (!response.ok) {
      // Propagate Mailpit's HTTP status to the caller
      return NextResponse.json(
        { error: 'Mailpit API error', status: response.status },
        { status: response.status }
      )
    }

    const data = await response.json()

    // Transform Mailpit response to our expected format
    return NextResponse.json({
      stats: {
        totalAccounts: 1,
        activeAccounts: 1,
        totalEmails: data.Messages || 0,
        unreadEmails: data.Unread || 0,
        totalTasks: 0,
        pendingTasks: 0,
        overdueTasks: 0,
        aiAnalyzedCount: 0,
        lastSyncTime: new Date().toISOString(),
      },
      // Static single development account (Mailpit has no account concept)
      accounts: [{
        id: 'mailpit-dev',
        email: 'dev@mailpit.local',
        displayName: 'Mailpit (Development)',
        imapHost: 'mailpit',
        imapPort: 1143,
        smtpHost: 'mailpit',
        smtpPort: 1025,
        status: 'active' as const,
        lastSync: new Date().toISOString(),
        emailCount: data.Messages || 0,
        unreadCount: data.Unread || 0,
        createdAt: new Date().toISOString(),
      }],
      syncStatus: {
        running: false,
        accountsInProgress: [],
        lastCompleted: new Date().toISOString(),
        errors: [],
      },
      // Raw Mailpit metadata passed through for display
      mailpitInfo: {
        version: data.Version,
        databaseSize: data.DatabaseSize,
        uptime: data.RuntimeStats?.Uptime,
      }
    })
  } catch (error) {
    console.error('Failed to fetch from Mailpit:', error)
    return NextResponse.json(
      {
        error: 'Failed to connect to Mailpit',
        details: error instanceof Error ? error.message : 'Unknown error'
      },
      { status: 503 }
    )
  }
}
|
||||
172
admin-core/app/api/admin/middleware/[...path]/route.ts
Normal file
172
admin-core/app/api/admin/middleware/[...path]/route.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
/**
|
||||
* Middleware Admin API Proxy - Catch-all route
|
||||
* Proxies all /api/admin/middleware/* requests to backend
|
||||
* Forwards authentication cookies for session-based auth
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
// Backend origin that every /api/admin/middleware/* request is proxied to.
// Falls back to the local development backend when BACKEND_URL is unset.
const BACKEND_URL = process.env.BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
function getForwardHeaders(request: NextRequest): HeadersInit {
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
// Forward cookie for session auth
|
||||
const cookie = request.headers.get('cookie')
|
||||
if (cookie) {
|
||||
headers['Cookie'] = cookie
|
||||
}
|
||||
|
||||
// Forward authorization header if present
|
||||
const auth = request.headers.get('authorization')
|
||||
if (auth) {
|
||||
headers['Authorization'] = auth
|
||||
}
|
||||
|
||||
return headers
|
||||
}
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
const pathStr = path.join('/')
|
||||
const searchParams = request.nextUrl.searchParams.toString()
|
||||
const url = `${BACKEND_URL}/api/admin/middleware/${pathStr}${searchParams ? `?${searchParams}` : ''}`
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getForwardHeaders(request),
|
||||
signal: AbortSignal.timeout(30000)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Middleware API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
const pathStr = path.join('/')
|
||||
const url = `${BACKEND_URL}/api/admin/middleware/${pathStr}`
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: getForwardHeaders(request),
|
||||
body: JSON.stringify(body),
|
||||
signal: AbortSignal.timeout(30000)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Middleware API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function PUT(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
const pathStr = path.join('/')
|
||||
const url = `${BACKEND_URL}/api/admin/middleware/${pathStr}`
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: getForwardHeaders(request),
|
||||
body: JSON.stringify(body),
|
||||
signal: AbortSignal.timeout(30000)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Middleware API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ path: string[] }> }
|
||||
) {
|
||||
const { path } = await params
|
||||
const pathStr = path.join('/')
|
||||
const url = `${BACKEND_URL}/api/admin/middleware/${pathStr}`
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: getForwardHeaders(request),
|
||||
signal: AbortSignal.timeout(30000)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Middleware API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
59
admin-core/app/api/admin/middleware/route.ts
Normal file
59
admin-core/app/api/admin/middleware/route.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* Middleware Admin API Proxy - Base route
|
||||
* GET /api/admin/middleware -> GET all middleware configs
|
||||
* Forwards authentication cookies for session-based auth
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
// Backend origin that /api/admin/middleware requests are proxied to.
// Falls back to the local development backend when BACKEND_URL is unset.
const BACKEND_URL = process.env.BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
function getForwardHeaders(request: NextRequest): HeadersInit {
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
// Forward cookie for session auth
|
||||
const cookie = request.headers.get('cookie')
|
||||
if (cookie) {
|
||||
headers['Cookie'] = cookie
|
||||
}
|
||||
|
||||
// Forward authorization header if present
|
||||
const auth = request.headers.get('authorization')
|
||||
if (auth) {
|
||||
headers['Authorization'] = auth
|
||||
}
|
||||
|
||||
return headers
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const searchParams = request.nextUrl.searchParams.toString()
|
||||
const url = `${BACKEND_URL}/api/admin/middleware${searchParams ? `?${searchParams}` : ''}`
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getForwardHeaders(request),
|
||||
signal: AbortSignal.timeout(30000)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
return NextResponse.json(
|
||||
{ error: `Backend Error: ${response.status}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Middleware API proxy error:', error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Verbindung zum Backend fehlgeschlagen' },
|
||||
{ status: 503 }
|
||||
)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user