fix: Restore all files lost during destructive rebase

A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-02-09 09:51:32 +01:00
parent f7487ee240
commit bfdaf63ba9
2009 changed files with 749983 additions and 1731 deletions

View File

@@ -0,0 +1,950 @@
'use client'
/**
* Mac Mini Control Admin Page
*
* Headless Mac Mini Server Management
* - Power Controls (Wake-on-LAN, Restart, Shutdown)
* - Docker Container Management
* - Ollama LLM Model Management
* - System Status Monitoring
*/
import AdminLayout from '@/components/admin/AdminLayout'
import { useEffect, useState, useCallback, useRef } from 'react'
/**
 * Aggregate health snapshot of the headless Mac Mini, as returned by
 * `GET ${API_BASE}/status`. Optional fields are only present when the
 * corresponding subsystem is reachable.
 */
interface MacMiniStatus {
online: boolean
ping: boolean
ssh: boolean
docker: boolean
ollama: boolean
internet: boolean // Whether the Mac Mini currently has internet access (it normally runs offline)
ip: string
uptime?: string
cpu_load?: string
memory?: string
containers?: ContainerInfo[]
models?: ModelInfo[]
error?: string
}
// Actions that require internet access on the (normally offline) Mac Mini.
// Rendered as a hint list in the "Internet Status" banner when offline.
// NOTE: `action`/`description` values are user-facing German UI strings.
const INTERNET_REQUIRED_ACTIONS = [
{ action: 'LLM Modelle herunterladen', description: 'Ollama pull benötigt Verbindung zu ollama.com' },
{ action: 'Docker Base Images pullen', description: 'Neue Images von Docker Hub/GHCR' },
{ action: 'npm/pip/go Packages', description: 'Beim ersten Build oder neuen Dependencies' },
{ action: 'Git Pull/Push', description: 'Code-Synchronisation mit Remote-Repository' },
]
/** One Docker container row from the status endpoint (name, `docker ps`-style status, optional port mapping). */
interface ContainerInfo {
name: string
status: string
ports?: string
}
/** One installed Ollama model as reported by the status endpoint (pre-formatted size and modified strings). */
interface ModelInfo {
name: string
size: string
modified: string
}
/**
 * Progress of an in-flight `ollama pull`, derived from the streamed NDJSON
 * response. `completed`/`total` are byte counts; `percent` is 0–100.
 */
interface DownloadProgress {
model: string
status: string
completed: number
total: number
percent: number
}
// Static, curated metadata about a model: description and recommendations
// shown in the info modal and the recommendations section.
interface ModelDescription {
name: string
category: 'vision' | 'text' | 'code' | 'embedding'
size: string
description: string
useCases: string[] // user-facing German labels
recommended?: boolean // highlighted with a "Empfohlen" badge in the UI
}
// Curated catalogue of known Ollama models, keyed by the Ollama model tag.
// Looked up (exact, then base-name prefix) by getModelInfo() in the page
// component. All descriptions/use-cases are user-facing German strings.
const MODEL_DATABASE: Record<string, ModelDescription> = {
// Vision models (handwriting recognition / OCR)
'llama3.2-vision:11b': {
name: 'Llama 3.2 Vision 11B',
category: 'vision',
size: '7.8 GB',
description: 'Metas multimodales Vision-Modell. Kann Bilder und PDFs analysieren, Text aus Handschrift extrahieren.',
useCases: ['Handschrifterkennung', 'Bild-Analyse', 'Dokumentenverarbeitung', 'OCR-Aufgaben'],
recommended: true
},
'llama3.2-vision:90b': {
name: 'Llama 3.2 Vision 90B',
category: 'vision',
size: '55 GB',
description: 'Größte Version von Llama Vision. Beste Qualität für komplexe Bildanalyse.',
useCases: ['Komplexe Handschrift', 'Detaillierte Bild-Analyse', 'Mathematische Formeln'],
},
'minicpm-v': {
name: 'MiniCPM-V',
category: 'vision',
size: '5.5 GB',
description: 'Kompaktes Vision-Modell mit gutem Preis-Leistungs-Verhältnis für OCR.',
useCases: ['Schnelle OCR', 'Einfache Handschrift', 'Tabellen-Erkennung'],
recommended: true
},
'llava:13b': {
name: 'LLaVA 13B',
category: 'vision',
size: '8 GB',
description: 'Large Language-and-Vision Assistant. Gut für Bild-zu-Text Aufgaben.',
useCases: ['Bildbeschreibung', 'Handschrift', 'Diagramm-Analyse'],
},
'llava:34b': {
name: 'LLaVA 34B',
category: 'vision',
size: '20 GB',
description: 'Größere LLaVA-Version mit besserer Genauigkeit.',
useCases: ['Komplexe Dokumente', 'Wissenschaftliche Notation', 'Detailanalyse'],
},
'bakllava': {
name: 'BakLLaVA',
category: 'vision',
size: '4.7 GB',
description: 'Verbesserte LLaVA-Variante mit Mistral-Basis.',
useCases: ['Schnelle Bildanalyse', 'Handschrift', 'Formular-Verarbeitung'],
},
// Text models (exam grading)
'qwen2.5:14b': {
name: 'Qwen 2.5 14B',
category: 'text',
size: '9 GB',
description: 'Alibabas neuestes Sprachmodell. Exzellent für deutsche Texte und Bewertungsaufgaben.',
useCases: ['Klausurkorrektur', 'Aufsatzbewertung', 'Feedback-Generierung', 'Grammatikprüfung'],
recommended: true
},
'qwen2.5:7b': {
name: 'Qwen 2.5 7B',
category: 'text',
size: '4.7 GB',
description: 'Kleinere Qwen-Version, schneller bei ähnlicher Qualität.',
useCases: ['Schnelle Korrektur', 'Einfache Bewertungen', 'Rechtschreibprüfung'],
},
'qwen2.5:32b': {
name: 'Qwen 2.5 32B',
category: 'text',
size: '19 GB',
description: 'Große Qwen-Version für komplexe Bewertungsaufgaben.',
useCases: ['Detaillierte Analyse', 'Abitur-Klausuren', 'Komplexe Argumentation'],
},
'llama3.1:8b': {
name: 'Llama 3.1 8B',
category: 'text',
size: '4.7 GB',
description: 'Metas schnelles Textmodell. Gute Balance aus Geschwindigkeit und Qualität.',
useCases: ['Allgemeine Korrektur', 'Schnelles Feedback', 'Zusammenfassungen'],
},
'llama3.1:70b': {
name: 'Llama 3.1 70B',
category: 'text',
size: '40 GB',
description: 'Großes Llama-Modell für anspruchsvolle Aufgaben.',
useCases: ['Komplexe Klausuren', 'Tiefgehende Analyse', 'Wissenschaftliche Texte'],
},
'mistral': {
name: 'Mistral 7B',
category: 'text',
size: '4.1 GB',
description: 'Effizientes europäisches Modell mit guter deutscher Sprachunterstützung.',
useCases: ['Deutsche Texte', 'Schnelle Verarbeitung', 'Allgemeine Korrektur'],
},
'mixtral:8x7b': {
name: 'Mixtral 8x7B',
category: 'text',
size: '26 GB',
description: 'Mixture-of-Experts Modell. Kombiniert Geschwindigkeit mit hoher Qualität.',
useCases: ['Komplexe Korrektur', 'Multi-Aspekt-Bewertung', 'Wissenschaftliche Arbeiten'],
},
'gemma2:9b': {
name: 'Gemma 2 9B',
category: 'text',
size: '5.5 GB',
description: 'Googles kompaktes Modell. Gut für Instruktionen und Bewertungen.',
useCases: ['Strukturierte Bewertung', 'Feedback', 'Zusammenfassungen'],
},
'phi3': {
name: 'Phi-3',
category: 'text',
size: '2.3 GB',
description: 'Microsofts kleines aber leistungsfähiges Modell.',
useCases: ['Schnelle Checks', 'Einfache Korrektur', 'Ressourcenschonend'],
},
}
// Recommended models for the two concrete use cases of this deployment:
// handwriting OCR (vision models) and exam grading (text models).
// `model` keys should exist in MODEL_DATABASE; `reason` is a user-facing
// German string shown in the recommendations list.
const RECOMMENDED_MODELS = {
handwriting: [
{ model: 'llama3.2-vision:11b', reason: 'Beste Balance aus Qualität und Geschwindigkeit für Handschrift' },
{ model: 'minicpm-v', reason: 'Schnell und ressourcenschonend für einfache Handschrift' },
{ model: 'llava:13b', reason: 'Gute Alternative mit bewährter Vision-Architektur' },
],
grading: [
{ model: 'qwen2.5:14b', reason: 'Beste Qualität für deutsche Klausurkorrektur' },
{ model: 'llama3.1:8b', reason: 'Schnell für einfache Bewertungen' },
{ model: 'mistral', reason: 'Europäisches Modell mit guter Sprachqualität' },
]
}
/**
 * Admin page for managing the headless Mac Mini server:
 * power controls (Wake-on-LAN / restart / shutdown), Docker compose up/down,
 * Ollama model listing + streamed pulls, and a status dashboard.
 *
 * All backend calls go to `${API_BASE}` on the Mac Mini itself; the status is
 * polled every 30 s. Errors and success messages are surfaced inline.
 */
export default function MacMiniControlPage() {
  const [status, setStatus] = useState<MacMiniStatus | null>(null)
  const [loading, setLoading] = useState(true)
  // Name of the action currently in flight ('wake', 'restart', 'pull', ...);
  // used to disable all action buttons while one is running.
  const [actionLoading, setActionLoading] = useState<string | null>(null)
  const [error, setError] = useState<string | null>(null)
  const [message, setMessage] = useState<string | null>(null)
  const [downloadProgress, setDownloadProgress] = useState<DownloadProgress | null>(null)
  const [modelInput, setModelInput] = useState('')
  const [selectedModel, setSelectedModel] = useState<string | null>(null)
  const [showRecommendations, setShowRecommendations] = useState(false)
  // NOTE(review): this ref is closed (if set) in pullModel but never assigned
  // anywhere in this file — presumably a leftover from an earlier EventSource
  // implementation; kept for backward compatibility. TODO confirm and remove.
  const eventSourceRef = useRef<EventSource | null>(null)

  /**
   * Look up curated metadata for a model name.
   * Tries an exact match first, then a prefix match on the base name
   * (the part before the ':tag'). Returns null when unknown.
   */
  const getModelInfo = (modelName: string): ModelDescription | null => {
    if (MODEL_DATABASE[modelName]) return MODEL_DATABASE[modelName]
    const baseName = modelName.split(':')[0]
    // startsWith() also covers exact equality, so no separate === check is needed.
    const matchingKey = Object.keys(MODEL_DATABASE).find(key => key.startsWith(baseName))
    return matchingKey ? MODEL_DATABASE[matchingKey] : null
  }

  /** Whether a model (or another tag of the same base model) is already installed. */
  const isModelInstalled = (modelName: string): boolean => {
    if (!status?.models) return false
    return status.models.some(m =>
      m.name === modelName || m.name.startsWith(modelName.split(':')[0])
    )
  }

  // API endpoint (Mac Mini backend or local proxy)
  const API_BASE = 'http://192.168.178.100:8000/api/mac-mini'

  /** Fetch the full status snapshot; on failure, fall back to an all-offline status. */
  const fetchStatus = useCallback(async () => {
    setLoading(true)
    setError(null)
    try {
      const response = await fetch(`${API_BASE}/status`)
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || `HTTP ${response.status}`)
      }
      setStatus(data)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Verbindungsfehler')
      setStatus({
        online: false,
        ping: false,
        ssh: false,
        docker: false,
        ollama: false,
        internet: false,
        ip: '192.168.178.100',
        error: 'Verbindung fehlgeschlagen'
      })
    } finally {
      setLoading(false)
    }
  }, [])

  // Initial load
  useEffect(() => {
    fetchStatus()
  }, [fetchStatus])

  // Auto-refresh every 30 seconds
  useEffect(() => {
    const interval = setInterval(fetchStatus, 30000)
    return () => clearInterval(interval)
  }, [fetchStatus])

  /** Send a Wake-on-LAN packet, then re-poll status after 5 s and 15 s (boot takes a while). */
  const wakeOnLan = async () => {
    setActionLoading('wake')
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/wake`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || 'Wake-on-LAN fehlgeschlagen')
      }
      setMessage('Wake-on-LAN Paket gesendet')
      setTimeout(fetchStatus, 5000)
      setTimeout(fetchStatus, 15000)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Aufwecken')
    } finally {
      setActionLoading(null)
    }
  }

  /** Restart the machine after user confirmation; re-poll once it should be back up. */
  const restart = async () => {
    if (!confirm('Mac Mini wirklich neu starten?')) return
    setActionLoading('restart')
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/restart`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || 'Neustart fehlgeschlagen')
      }
      setMessage('Neustart eingeleitet')
      setTimeout(fetchStatus, 30000)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Neustart')
    } finally {
      setActionLoading(null)
    }
  }

  /** Shut the machine down after user confirmation. */
  const shutdown = async () => {
    if (!confirm('Mac Mini wirklich herunterfahren?')) return
    setActionLoading('shutdown')
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/shutdown`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || 'Shutdown fehlgeschlagen')
      }
      setMessage('Shutdown eingeleitet')
      setTimeout(fetchStatus, 10000)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Herunterfahren')
    } finally {
      setActionLoading(null)
    }
  }

  /** Start the Docker compose stack on the Mac Mini. */
  const dockerUp = async () => {
    setActionLoading('docker-up')
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/docker/up`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || 'Docker Start fehlgeschlagen')
      }
      setMessage('Docker Container werden gestartet...')
      setTimeout(fetchStatus, 5000)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Docker Start')
    } finally {
      setActionLoading(null)
    }
  }

  /** Stop the Docker compose stack after user confirmation. */
  const dockerDown = async () => {
    if (!confirm('Docker Container wirklich stoppen?')) return
    setActionLoading('docker-down')
    setError(null)
    setMessage(null)
    try {
      const response = await fetch(`${API_BASE}/docker/down`, { method: 'POST' })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.detail || 'Docker Stop fehlgeschlagen')
      }
      setMessage('Docker Container werden gestoppt...')
      setTimeout(fetchStatus, 5000)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Docker Stop')
    } finally {
      setActionLoading(null)
    }
  }

  /**
   * Pull an Ollama model, streaming NDJSON progress lines from the backend.
   *
   * @param modelOverride - Model to pull. When omitted, the text-input value is
   *   used. This parameter exists because `setModelInput(x); pullModel()` reads
   *   the *stale* `modelInput` from the current render closure — the
   *   recommendation buttons must pass the model explicitly.
   */
  const pullModel = async (modelOverride?: string) => {
    const model = (modelOverride ?? modelInput).trim()
    if (!model) return
    setActionLoading('pull')
    setError(null)
    setMessage(null)
    setDownloadProgress({
      model,
      status: 'starting',
      completed: 0,
      total: 0,
      percent: 0
    })
    try {
      // Close any existing EventSource (defensive; see note on eventSourceRef)
      if (eventSourceRef.current) {
        eventSourceRef.current.close()
      }
      // Use fetch with a streamed body for progress updates
      const response = await fetch(`${API_BASE}/ollama/pull`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ model })
      })
      if (!response.ok) {
        const data = await response.json()
        throw new Error(data.detail || 'Model Pull fehlgeschlagen')
      }
      const reader = response.body?.getReader()
      const decoder = new TextDecoder()
      if (reader) {
        // Buffer partial lines across chunks: a JSON line may be split over
        // two reads, and decode({stream:true}) keeps multi-byte characters
        // intact across chunk boundaries.
        let buffer = ''
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
          buffer += decoder.decode(value, { stream: true })
          const lines = buffer.split('\n')
          buffer = lines.pop() ?? ''
          for (const line of lines) {
            if (!line.trim()) continue
            let data: any
            try {
              data = JSON.parse(line)
            } catch {
              // Skip malformed lines without aborting the download
              continue
            }
            // Throw OUTSIDE the parse try/catch so server-reported errors are
            // not swallowed (the original code threw inside it and the error
            // was silently discarded).
            if (data.error) {
              throw new Error(data.error)
            }
            if (data.status === 'downloading' && data.total) {
              setDownloadProgress({
                model,
                status: data.status,
                completed: data.completed || 0,
                total: data.total,
                percent: Math.round((data.completed || 0) / data.total * 100)
              })
            } else if (data.status === 'success') {
              setMessage(`Modell ${model} erfolgreich heruntergeladen`)
              setDownloadProgress(null)
              setModelInput('')
              fetchStatus()
            }
          }
        }
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Fehler beim Model Download')
      setDownloadProgress(null)
    } finally {
      setActionLoading(null)
    }
  }

  /** Format a byte count as a human-readable string (e.g. 1536 -> "1.5 KB"). */
  const formatBytes = (bytes: number) => {
    if (bytes === 0) return '0 B'
    const k = 1024
    const sizes = ['B', 'KB', 'MB', 'GB', 'TB']
    const i = Math.floor(Math.log(bytes) / Math.log(k))
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]
  }

  // Status badge styling (green when online, red otherwise)
  const getStatusBadge = (online: boolean) => {
    return online
      ? 'px-3 py-1 rounded-full text-sm font-semibold bg-green-100 text-green-800'
      : 'px-3 py-1 rounded-full text-sm font-semibold bg-red-100 text-red-800'
  }

  const getServiceStatus = (ok: boolean) => {
    return ok
      ? 'flex items-center gap-2 text-green-600'
      : 'flex items-center gap-2 text-red-500'
  }

  return (
    <AdminLayout title="Mac Mini Control" description="Headless Server Management">
      {/* Power Controls */}
      <div className="bg-white rounded-xl border border-slate-200 p-6 mb-6">
        <div className="flex items-center justify-between mb-6">
          <div className="flex items-center gap-4">
            <div className="text-4xl">🖥</div>
            <div>
              <h2 className="text-xl font-bold text-slate-900">Mac Mini Headless</h2>
              <p className="text-slate-500 text-sm">IP: {status?.ip || '192.168.178.100'}</p>
            </div>
          </div>
          <span className={getStatusBadge(status?.online || false)}>
            {loading ? 'Laden...' : status?.online ? 'Online' : 'Offline'}
          </span>
        </div>
        {/* Power Buttons */}
        <div className="flex items-center gap-4 mb-6">
          <button
            onClick={wakeOnLan}
            disabled={actionLoading !== null}
            className="px-4 py-2 bg-green-600 text-white rounded-lg font-medium hover:bg-green-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
          >
            {actionLoading === 'wake' ? '...' : '⚡ Wake on LAN'}
          </button>
          <button
            onClick={restart}
            disabled={actionLoading !== null || !status?.online}
            className="px-4 py-2 bg-yellow-600 text-white rounded-lg font-medium hover:bg-yellow-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
          >
            {actionLoading === 'restart' ? '...' : '🔄 Neustart'}
          </button>
          <button
            onClick={shutdown}
            disabled={actionLoading !== null || !status?.online}
            className="px-4 py-2 bg-red-600 text-white rounded-lg font-medium hover:bg-red-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
          >
            {actionLoading === 'shutdown' ? '...' : '⏻ Herunterfahren'}
          </button>
          <button
            onClick={fetchStatus}
            disabled={loading}
            className="px-4 py-2 border border-slate-300 text-slate-700 rounded-lg font-medium hover:bg-slate-50 disabled:opacity-50 transition-colors"
          >
            {loading ? '...' : '🔍 Status aktualisieren'}
          </button>
          {message && <span className="ml-4 text-sm text-green-600 font-medium">{message}</span>}
          {error && <span className="ml-4 text-sm text-red-600 font-medium">{error}</span>}
        </div>
        {/* Service Status Grid */}
        <div className="grid grid-cols-2 md:grid-cols-5 gap-4">
          <div className="bg-slate-50 rounded-lg p-4">
            <div className="text-sm text-slate-500 mb-1">Ping</div>
            <div className={getServiceStatus(status?.ping || false)}>
              <span className={`w-2 h-2 rounded-full ${status?.ping ? 'bg-green-500' : 'bg-red-500'}`}></span>
              {status?.ping ? 'Erreichbar' : 'Nicht erreichbar'}
            </div>
          </div>
          <div className="bg-slate-50 rounded-lg p-4">
            <div className="text-sm text-slate-500 mb-1">SSH</div>
            <div className={getServiceStatus(status?.ssh || false)}>
              <span className={`w-2 h-2 rounded-full ${status?.ssh ? 'bg-green-500' : 'bg-red-500'}`}></span>
              {status?.ssh ? 'Verbunden' : 'Getrennt'}
            </div>
          </div>
          <div className="bg-slate-50 rounded-lg p-4">
            <div className="text-sm text-slate-500 mb-1">Docker</div>
            <div className={getServiceStatus(status?.docker || false)}>
              <span className={`w-2 h-2 rounded-full ${status?.docker ? 'bg-green-500' : 'bg-red-500'}`}></span>
              {status?.docker ? 'Aktiv' : 'Inaktiv'}
            </div>
          </div>
          <div className="bg-slate-50 rounded-lg p-4">
            <div className="text-sm text-slate-500 mb-1">Ollama</div>
            <div className={getServiceStatus(status?.ollama || false)}>
              <span className={`w-2 h-2 rounded-full ${status?.ollama ? 'bg-green-500' : 'bg-red-500'}`}></span>
              {status?.ollama ? 'Bereit' : 'Nicht bereit'}
            </div>
          </div>
          <div className="bg-slate-50 rounded-lg p-4">
            <div className="text-sm text-slate-500 mb-1">Uptime</div>
            <div className="font-semibold text-slate-700">
              {status?.uptime || '-'}
            </div>
          </div>
        </div>
      </div>
      {/* Internet Status Banner */}
      <div className={`rounded-xl border p-4 mb-6 ${
        status?.internet
          ? 'bg-green-50 border-green-200'
          : 'bg-amber-50 border-amber-200'
      }`}>
        <div className="flex items-start justify-between">
          <div className="flex gap-3">
            <span className="text-2xl">{status?.internet ? '🌐' : '📴'}</span>
            <div>
              <h3 className={`font-semibold ${status?.internet ? 'text-green-900' : 'text-amber-900'}`}>
                Internet: {status?.internet ? 'Verbunden' : 'Offline (Normalbetrieb)'}
              </h3>
              <p className={`text-sm mt-1 ${status?.internet ? 'text-green-700' : 'text-amber-700'}`}>
                {status?.internet
                  ? 'Mac Mini hat Internet-Zugang. LLM-Downloads und Updates möglich.'
                  : 'Mac Mini arbeitet offline. Für bestimmte Aktionen muss Internet aktiviert werden.'}
              </p>
            </div>
          </div>
          <span className={`px-3 py-1 rounded-full text-sm font-semibold ${
            status?.internet
              ? 'bg-green-100 text-green-800'
              : 'bg-amber-100 text-amber-800'
          }`}>
            {status?.internet ? 'Online' : 'Offline'}
          </span>
        </div>
        {/* Internet-required actions — only shown while offline */}
        {!status?.internet && (
          <div className="mt-4 pt-4 border-t border-amber-200">
            <h4 className="font-medium text-amber-900 mb-2"> Diese Aktionen benötigen Internet:</h4>
            <div className="grid grid-cols-1 md:grid-cols-2 gap-2">
              {INTERNET_REQUIRED_ACTIONS.map((item, idx) => (
                <div key={idx} className="flex items-start gap-2 text-sm">
                  <span className="text-amber-600 mt-0.5"></span>
                  <div>
                    <span className="font-medium text-amber-800">{item.action}</span>
                    <span className="text-amber-600 ml-1"> {item.description}</span>
                  </div>
                </div>
              ))}
            </div>
            <p className="text-xs text-amber-600 mt-3 italic">
              💡 Tipp: Internet am Router/Switch nur bei Bedarf für den Mac Mini aktivieren.
            </p>
          </div>
        )}
      </div>
      {/* Docker Section */}
      <div className="bg-white rounded-xl border border-slate-200 p-6 mb-6">
        <div className="flex items-center justify-between mb-4">
          <h3 className="font-semibold text-slate-900 flex items-center gap-2">
            <span className="text-2xl">🐳</span> Docker Container
          </h3>
          <div className="flex gap-2">
            <button
              onClick={dockerUp}
              disabled={actionLoading !== null || !status?.online}
              className="px-3 py-1.5 bg-green-600 text-white text-sm rounded-lg font-medium hover:bg-green-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
            >
              {actionLoading === 'docker-up' ? '...' : '▶ Start'}
            </button>
            <button
              onClick={dockerDown}
              disabled={actionLoading !== null || !status?.online}
              className="px-3 py-1.5 bg-red-600 text-white text-sm rounded-lg font-medium hover:bg-red-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
            >
              {actionLoading === 'docker-down' ? '...' : '⏹ Stop'}
            </button>
          </div>
        </div>
        {status?.containers && status.containers.length > 0 ? (
          <div className="space-y-2">
            {status.containers.map((container, idx) => (
              <div key={idx} className="flex items-center justify-between bg-slate-50 rounded-lg p-3">
                <div className="flex items-center gap-3">
                  <span className={`w-2 h-2 rounded-full ${
                    container.status.includes('Up') ? 'bg-green-500' : 'bg-red-500'
                  }`}></span>
                  <span className="font-medium text-slate-700">{container.name}</span>
                </div>
                <div className="flex items-center gap-4">
                  {container.ports && (
                    <span className="text-sm text-slate-500 font-mono">{container.ports}</span>
                  )}
                  <span className={`text-sm ${
                    container.status.includes('Up') ? 'text-green-600' : 'text-red-500'
                  }`}>
                    {container.status}
                  </span>
                </div>
              </div>
            ))}
          </div>
        ) : (
          <p className="text-slate-500 text-center py-4">
            {status?.online ? 'Keine Container gefunden' : 'Server nicht erreichbar'}
          </p>
        )}
      </div>
      {/* Ollama Section */}
      <div className="bg-white rounded-xl border border-slate-200 p-6">
        <h3 className="font-semibold text-slate-900 flex items-center gap-2 mb-4">
          <span className="text-2xl">🤖</span> Ollama LLM Modelle
        </h3>
        {/* Installed Models */}
        {status?.models && status.models.length > 0 ? (
          <div className="space-y-2 mb-6">
            {status.models.map((model, idx) => {
              const modelInfo = getModelInfo(model.name)
              return (
                <div key={idx} className="flex items-center justify-between bg-slate-50 rounded-lg p-3 hover:bg-slate-100 transition-colors">
                  <div className="flex items-center gap-3">
                    <span className="w-2 h-2 rounded-full bg-green-500"></span>
                    <span className="font-medium text-slate-700">{model.name}</span>
                    {modelInfo && (
                      <button
                        onClick={() => setSelectedModel(model.name)}
                        className="text-blue-500 hover:text-blue-700 transition-colors"
                        title="Modell-Info anzeigen"
                      >
                        <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
                        </svg>
                      </button>
                    )}
                    {modelInfo?.category === 'vision' && (
                      <span className="px-2 py-0.5 text-xs bg-purple-100 text-purple-700 rounded-full">Vision</span>
                    )}
                  </div>
                  <div className="flex items-center gap-4">
                    <span className="text-sm text-slate-500">{model.size}</span>
                    <span className="text-sm text-slate-400">{model.modified}</span>
                  </div>
                </div>
              )
            })}
          </div>
        ) : (
          <p className="text-slate-500 text-center py-4 mb-6">
            {status?.ollama ? 'Keine Modelle installiert' : 'Ollama nicht erreichbar'}
          </p>
        )}
        {/* Model Info Modal */}
        {selectedModel && (
          <div className="fixed inset-0 bg-black/50 flex items-center justify-center z-50" onClick={() => setSelectedModel(null)}>
            <div className="bg-white rounded-xl p-6 max-w-lg w-full mx-4 shadow-2xl" onClick={e => e.stopPropagation()}>
              {(() => {
                const info = getModelInfo(selectedModel)
                if (!info) return <p>Keine Informationen verfügbar</p>
                return (
                  <>
                    <div className="flex items-start justify-between mb-4">
                      <div>
                        <h3 className="text-xl font-bold text-slate-900">{info.name}</h3>
                        <div className="flex items-center gap-2 mt-1">
                          <span className={`px-2 py-0.5 text-xs rounded-full ${
                            info.category === 'vision' ? 'bg-purple-100 text-purple-700' :
                            info.category === 'text' ? 'bg-blue-100 text-blue-700' :
                            'bg-slate-100 text-slate-700'
                          }`}>
                            {info.category === 'vision' ? '👁️ Vision' : info.category === 'text' ? '📝 Text' : info.category}
                          </span>
                          <span className="text-sm text-slate-500">{info.size}</span>
                        </div>
                      </div>
                      <button onClick={() => setSelectedModel(null)} className="text-slate-400 hover:text-slate-600">
                        <svg className="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
                        </svg>
                      </button>
                    </div>
                    <p className="text-slate-600 mb-4">{info.description}</p>
                    <div>
                      <h4 className="font-medium text-slate-700 mb-2">Geeignet für:</h4>
                      <div className="flex flex-wrap gap-2">
                        {info.useCases.map((useCase, i) => (
                          <span key={i} className="px-3 py-1 bg-slate-100 text-slate-700 rounded-full text-sm">
                            {useCase}
                          </span>
                        ))}
                      </div>
                    </div>
                  </>
                )
              })()}
            </div>
          </div>
        )}
        {/* Download New Model */}
        <div className="border-t border-slate-200 pt-6">
          <h4 className="font-medium text-slate-700 mb-3">Neues Modell herunterladen</h4>
          <div className="flex gap-3 mb-4">
            <input
              type="text"
              value={modelInput}
              onChange={(e) => setModelInput(e.target.value)}
              placeholder="z.B. llama3.2, mistral, qwen2.5:14b"
              className="flex-1 px-4 py-2 border border-slate-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent"
              disabled={actionLoading === 'pull'}
            />
            <button
              onClick={() => pullModel()}
              disabled={actionLoading !== null || !status?.ollama || !modelInput.trim()}
              className="px-6 py-2 bg-primary-600 text-white rounded-lg font-medium hover:bg-primary-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
            >
              {actionLoading === 'pull' ? 'Lädt...' : 'Herunterladen'}
            </button>
          </div>
          {/* Download Progress */}
          {downloadProgress && (
            <div className="bg-slate-50 rounded-lg p-4">
              <div className="flex justify-between mb-2">
                <span className="font-medium text-slate-700">{downloadProgress.model}</span>
                <span className="text-sm text-slate-500">
                  {formatBytes(downloadProgress.completed)} / {formatBytes(downloadProgress.total)}
                </span>
              </div>
              <div className="h-3 bg-slate-200 rounded-full overflow-hidden">
                <div
                  className="h-full bg-gradient-to-r from-primary-500 to-primary-600 transition-all duration-300"
                  style={{ width: `${downloadProgress.percent}%` }}
                ></div>
              </div>
              <div className="text-center mt-2 text-sm font-medium text-slate-600">
                {downloadProgress.percent}%
              </div>
            </div>
          )}
          {/* Toggle Recommendations */}
          <button
            onClick={() => setShowRecommendations(!showRecommendations)}
            className="mt-4 text-primary-600 hover:text-primary-700 font-medium text-sm flex items-center gap-2"
          >
            <svg className={`w-4 h-4 transition-transform ${showRecommendations ? 'rotate-180' : ''}`} fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 9l-7 7-7-7" />
            </svg>
            {showRecommendations ? 'Empfehlungen ausblenden' : 'Modell-Empfehlungen für Klausurkorrektur & Handschrift anzeigen'}
          </button>
        </div>
        {/* Recommendations Section */}
        {showRecommendations && (
          <div className="border-t border-slate-200 pt-6 mt-6">
            <h4 className="font-semibold text-slate-900 mb-4">📚 Empfohlene Modelle</h4>
            {/* Handwriting Recognition */}
            <div className="mb-6">
              <h5 className="font-medium text-slate-700 flex items-center gap-2 mb-3">
                <span className="text-lg"></span> Handschrifterkennung (Vision-Modelle)
              </h5>
              <div className="space-y-2">
                {RECOMMENDED_MODELS.handwriting.map((rec, idx) => {
                  const info = MODEL_DATABASE[rec.model]
                  const installed = isModelInstalled(rec.model)
                  return (
                    <div key={idx} className={`flex items-center justify-between rounded-lg p-3 ${installed ? 'bg-green-50 border border-green-200' : 'bg-slate-50'}`}>
                      <div className="flex-1">
                        <div className="flex items-center gap-2">
                          <span className="font-medium text-slate-700">{info?.name || rec.model}</span>
                          <span className="px-2 py-0.5 text-xs bg-purple-100 text-purple-700 rounded-full">Vision</span>
                          {info?.recommended && <span className="px-2 py-0.5 text-xs bg-yellow-100 text-yellow-700 rounded-full"> Empfohlen</span>}
                          {installed && <span className="px-2 py-0.5 text-xs bg-green-100 text-green-700 rounded-full"> Installiert</span>}
                        </div>
                        <p className="text-sm text-slate-500 mt-1">{rec.reason}</p>
                        <p className="text-xs text-slate-400 mt-0.5">Größe: {info?.size || 'unbekannt'}</p>
                      </div>
                      {!installed && (
                        <button
                          // Pass the model explicitly: pullModel() reading
                          // modelInput here would see the stale (pre-setState) value.
                          onClick={() => { setModelInput(rec.model); pullModel(rec.model) }}
                          disabled={actionLoading !== null || !status?.ollama}
                          className="ml-4 px-4 py-2 bg-primary-600 text-white text-sm rounded-lg font-medium hover:bg-primary-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
                        >
                          Installieren
                        </button>
                      )}
                    </div>
                  )
                })}
              </div>
            </div>
            {/* Grading / Text Analysis */}
            <div>
              <h5 className="font-medium text-slate-700 flex items-center gap-2 mb-3">
                <span className="text-lg">📝</span> Klausurkorrektur (Text-Modelle)
              </h5>
              <div className="space-y-2">
                {RECOMMENDED_MODELS.grading.map((rec, idx) => {
                  const info = MODEL_DATABASE[rec.model]
                  const installed = isModelInstalled(rec.model)
                  return (
                    <div key={idx} className={`flex items-center justify-between rounded-lg p-3 ${installed ? 'bg-green-50 border border-green-200' : 'bg-slate-50'}`}>
                      <div className="flex-1">
                        <div className="flex items-center gap-2">
                          <span className="font-medium text-slate-700">{info?.name || rec.model}</span>
                          <span className="px-2 py-0.5 text-xs bg-blue-100 text-blue-700 rounded-full">Text</span>
                          {info?.recommended && <span className="px-2 py-0.5 text-xs bg-yellow-100 text-yellow-700 rounded-full"> Empfohlen</span>}
                          {installed && <span className="px-2 py-0.5 text-xs bg-green-100 text-green-700 rounded-full"> Installiert</span>}
                        </div>
                        <p className="text-sm text-slate-500 mt-1">{rec.reason}</p>
                        <p className="text-xs text-slate-400 mt-0.5">Größe: {info?.size || 'unbekannt'}</p>
                      </div>
                      {!installed && (
                        <button
                          // Pass the model explicitly (see note above in the vision list)
                          onClick={() => { setModelInput(rec.model); pullModel(rec.model) }}
                          disabled={actionLoading !== null || !status?.ollama}
                          className="ml-4 px-4 py-2 bg-primary-600 text-white text-sm rounded-lg font-medium hover:bg-primary-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
                        >
                          Installieren
                        </button>
                      )}
                    </div>
                  )
                })}
              </div>
            </div>
            {/* Info Box */}
            <div className="mt-6 bg-amber-50 border border-amber-200 rounded-lg p-4">
              <div className="flex gap-3">
                <span className="text-xl">💡</span>
                <div>
                  <h5 className="font-medium text-amber-900">Tipp: Modell-Kombinationen</h5>
                  <p className="text-sm text-amber-800 mt-1">
                    Für beste Ergebnisse bei Klausuren mit Handschrift kombiniere ein <strong>Vision-Modell</strong> (für OCR/Handschrifterkennung)
                    mit einem <strong>Text-Modell</strong> (für Bewertung und Feedback). Beispiel: <code className="bg-amber-100 px-1 rounded">llama3.2-vision:11b</code> + <code className="bg-amber-100 px-1 rounded">qwen2.5:14b</code>
                  </p>
                </div>
              </div>
            </div>
          </div>
        )}
      </div>
      {/* Info */}
      <div className="mt-6 bg-blue-50 border border-blue-200 rounded-xl p-4">
        <div className="flex gap-3">
          <svg className="w-5 h-5 text-blue-600 flex-shrink-0 mt-0.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div>
            <h4 className="font-semibold text-blue-900">Mac Mini Headless Server</h4>
            <p className="text-sm text-blue-800 mt-1">
              Der Mac Mini läuft ohne Monitor im LAN (192.168.178.100). Er hostet Docker-Container
              für das Backend, Ollama für lokale LLM-Verarbeitung und weitere Services.
              Wake-on-LAN ermöglicht das Remote-Einschalten.
            </p>
          </div>
        </div>
      </div>
    </AdminLayout>
  )
}