Files
breakpilot-lehrer/website/app/admin/mac-mini/page.tsx
Benjamin Admin b6983ab1dc [split-required] Split 500-1000 LOC files across all services
backend-lehrer (5 files):
- alerts_agent/db/repository.py (992 → 5), abitur_docs_api.py (956 → 3)
- teacher_dashboard_api.py (951 → 3), services/pdf_service.py (916 → 3)
- mail/mail_db.py (987 → 6)

klausur-service (5 files):
- legal_templates_ingestion.py (942 → 3), ocr_pipeline_postprocess.py (929 → 4)
- ocr_pipeline_words.py (876 → 3), ocr_pipeline_ocr_merge.py (616 → 2)
- KorrekturPage.tsx (956 → 6)

website (5 pages):
- mail (985 → 9), edu-search (958 → 8), mac-mini (950 → 7)
- ocr-labeling (946 → 7), audit-workspace (871 → 4)

studio-v2 (5 files + 1 deleted):
- page.tsx (946 → 5), MessagesContext.tsx (925 → 4)
- korrektur (914 → 6), worksheet-cleanup (899 → 6)
- useVocabWorksheet.ts (888 → 3)
- Deleted dead page-original.tsx (934 LOC)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-24 23:35:37 +02:00

227 lines
7.5 KiB
TypeScript

'use client'
/**
* Mac Mini Control Admin Page
*
* Headless Mac Mini Server Management
* - Power Controls (Wake-on-LAN, Restart, Shutdown)
* - Docker Container Management
* - Ollama LLM Model Management
* - System Status Monitoring
*/
import AdminLayout from '@/components/admin/AdminLayout'
import { useEffect, useState, useCallback, useRef } from 'react'
import type { MacMiniStatus, DownloadProgress } from './types'
import { API_BASE } from './constants'
import PowerControls from './_components/PowerControls'
import InternetStatus from './_components/InternetStatus'
import DockerSection from './_components/DockerSection'
import OllamaSection from './_components/OllamaSection'
export default function MacMiniControlPage() {
  /** Live status snapshot of the Mac Mini; null until the first fetch settles. */
  const [status, setStatus] = useState<MacMiniStatus | null>(null)
  const [loading, setLoading] = useState(true)
  // Key of the action currently in flight ('wake', 'restart', 'pull', ...) so
  // each button can render its own busy state independently.
  const [actionLoading, setActionLoading] = useState<string | null>(null)
  const [error, setError] = useState<string | null>(null)
  const [message, setMessage] = useState<string | null>(null)
  const [downloadProgress, setDownloadProgress] = useState<DownloadProgress | null>(null)
  const [modelInput, setModelInput] = useState('')
  // NOTE(review): this ref is never assigned anywhere in this component — it looks
  // vestigial from a pre-split SSE implementation. Kept so the close() call in
  // pullModel remains a harmless no-op; confirm before removing.
  const eventSourceRef = useRef<EventSource | null>(null)

  /**
   * Fetches `${API_BASE}/status` and mirrors the result into state.
   * On failure a synthetic "offline" status object is set so the child
   * sections still render a sensible degraded view instead of nothing.
   */
  const fetchStatus = useCallback(async () => {
    setLoading(true)
    setError(null)
    try {
      const response = await fetch(`${API_BASE}/status`)
      const data = await response.json()
      if (!response.ok) throw new Error(data.detail || `HTTP ${response.status}`)
      setStatus(data)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Verbindungsfehler')
      // Placeholder "everything offline" status for the UI.
      setStatus({
        online: false, ping: false, ssh: false, docker: false,
        ollama: false, internet: false, ip: '192.168.178.100',
        error: 'Verbindung fehlgeschlagen'
      })
    } finally {
      setLoading(false)
    }
  }, [])

  // Initial load once on mount...
  useEffect(() => { fetchStatus() }, [fetchStatus])
  // ...then poll every 30s while the page is open.
  useEffect(() => {
    const interval = setInterval(fetchStatus, 30000)
    return () => clearInterval(interval)
  }, [fetchStatus])

  /**
   * Shared POST helper for the one-shot control actions.
   *
   * @param action     key shown as the per-button loading indicator and used in error text
   * @param endpoint   path below API_BASE (e.g. 'docker/up')
   * @param confirmMsg optional confirm() prompt; silently aborts when declined
   * @returns parsed response body on success, null on failure (error state is set)
   */
  const performAction = async (action: string, endpoint: string, confirmMsg?: string) => {
    if (confirmMsg && !confirm(confirmMsg)) return
    setActionLoading(action)
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/${endpoint}`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) throw new Error(data.detail || `${action} fehlgeschlagen`)
      return data
    } catch (err) {
      setError(err instanceof Error ? err.message : `Fehler bei ${action}`)
      return null
    } finally {
      setActionLoading(null)
    }
  }

  /** Sends a Wake-on-LAN packet, then re-polls twice while the machine boots. */
  const wakeOnLan = async () => {
    const result = await performAction('wake', 'wake')
    if (result) {
      setMessage('Wake-on-LAN Paket gesendet')
      setTimeout(fetchStatus, 5000)
      setTimeout(fetchStatus, 15000)
    }
  }

  /** Reboots the machine after confirmation; status re-check after 30s. */
  const restart = async () => {
    const result = await performAction('restart', 'restart', 'Mac Mini wirklich neu starten?')
    if (result) {
      setMessage('Neustart eingeleitet')
      setTimeout(fetchStatus, 30000)
    }
  }

  /** Powers the machine off after confirmation; status re-check after 10s. */
  const shutdown = async () => {
    const result = await performAction('shutdown', 'shutdown', 'Mac Mini wirklich herunterfahren?')
    if (result) {
      setMessage('Shutdown eingeleitet')
      setTimeout(fetchStatus, 10000)
    }
  }

  /** Starts the Docker containers on the host. */
  const dockerUp = async () => {
    const result = await performAction('docker-up', 'docker/up')
    if (result) {
      setMessage('Docker Container werden gestartet...')
      setTimeout(fetchStatus, 5000)
    }
  }

  /** Stops the Docker containers on the host, after confirmation. */
  const dockerDown = async () => {
    const result = await performAction('docker-down', 'docker/down', 'Docker Container wirklich stoppen?')
    if (result) {
      setMessage('Docker Container werden gestoppt...')
      setTimeout(fetchStatus, 5000)
    }
  }

  /**
   * Streams an Ollama model pull (NDJSON over fetch) and reflects progress.
   *
   * Fixes over the previous implementation:
   * - decodes with `{ stream: true }` and buffers the trailing partial line, so
   *   JSON lines split across network chunks are no longer dropped or corrupted
   * - server-reported `error` events now propagate to the outer catch (and the
   *   user) instead of being swallowed by the catch that skips malformed lines
   */
  const pullModel = async () => {
    if (!modelInput.trim()) return
    setActionLoading('pull')
    setError(null)
    setMessage(null)
    setDownloadProgress({ model: modelInput, status: 'starting', completed: 0, total: 0, percent: 0 })
    try {
      if (eventSourceRef.current) eventSourceRef.current.close()
      const response = await fetch(`${API_BASE}/ollama/pull`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model: modelInput })
      })
      if (!response.ok) {
        const data = await response.json()
        throw new Error(data.detail || 'Model Pull fehlgeschlagen')
      }
      const reader = response.body?.getReader()
      const decoder = new TextDecoder()
      if (reader) {
        // Carry-over buffer: a network chunk may end mid-JSON-line, so only
        // lines terminated by '\n' are parsed; the remainder waits for more data.
        let buffer = ''
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
          buffer += decoder.decode(value, { stream: true })
          const lines = buffer.split('\n')
          buffer = lines.pop() ?? ''
          for (const line of lines) {
            if (!line.trim()) continue
            let data: { status?: string; completed?: number; total?: number; error?: string }
            try {
              data = JSON.parse(line)
            } catch {
              continue // genuinely malformed line — skip it
            }
            // Parsed successfully: handle OUTSIDE the parse-try so a thrown
            // server error is not swallowed as a "parse error".
            if (data.error) throw new Error(data.error)
            if (data.status === 'downloading' && data.total) {
              setDownloadProgress({
                model: modelInput, status: data.status,
                completed: data.completed || 0, total: data.total,
                percent: Math.round((data.completed || 0) / data.total * 100)
              })
            } else if (data.status === 'success') {
              setMessage(`Modell ${modelInput} erfolgreich heruntergeladen`)
              setDownloadProgress(null)
              setModelInput('')
              fetchStatus()
            }
          }
        }
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Model Download')
      setDownloadProgress(null)
    } finally {
      setActionLoading(null)
    }
  }

  return (
    <AdminLayout title="Mac Mini Control" description="Headless Server Management">
      <PowerControls
        status={status}
        loading={loading}
        actionLoading={actionLoading}
        message={message}
        error={error}
        onWake={wakeOnLan}
        onRestart={restart}
        onShutdown={shutdown}
        onRefresh={fetchStatus}
      />
      <InternetStatus internet={status?.internet} />
      <DockerSection
        status={status}
        actionLoading={actionLoading}
        onDockerUp={dockerUp}
        onDockerDown={dockerDown}
      />
      <OllamaSection
        status={status}
        actionLoading={actionLoading}
        downloadProgress={downloadProgress}
        modelInput={modelInput}
        setModelInput={setModelInput}
        onPullModel={pullModel}
      />
      {/* Info */}
      <div className="mt-6 bg-blue-50 border border-blue-200 rounded-xl p-4">
        <div className="flex gap-3">
          <svg className="w-5 h-5 text-blue-600 flex-shrink-0 mt-0.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div>
            <h4 className="font-semibold text-blue-900">Mac Mini Headless Server</h4>
            <p className="text-sm text-blue-800 mt-1">
              Der Mac Mini läuft ohne Monitor im LAN (192.168.178.100). Er hostet Docker-Container
              für das Backend, Ollama für lokale LLM-Verarbeitung und weitere Services.
              Wake-on-LAN ermöglicht das Remote-Einschalten.
            </p>
          </div>
        </div>
      </div>
    </AdminLayout>
  )
}