This repository has been archived on 2026-02-15. You can view files and clone it. You cannot open issues or pull requests or push a commit.
Files
breakpilot-pwa/studio-v2/components/AiPrompt.tsx
Benjamin Admin 21a844cb8a fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 09:51:32 +01:00

262 lines
9.0 KiB
TypeScript
Raw Blame History

This file contains invisible Unicode characters
This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
'use client'
/**
* AI Prompt Component for Studio v2
*
* Eingabezeile für Fragen an den lokalen Ollama-Server.
* Unterstützt Streaming-Antworten und automatische Modell-Erkennung.
* Angepasst an das glassmorphism Design von Studio v2.
*/
import { useState, useEffect, useRef } from 'react'
import { useTheme } from '@/lib/ThemeContext'
/** One locally installed model as returned by Ollama's `GET /api/tags`. */
interface OllamaModel {
  name: string   // model tag, e.g. "llama3.2:latest"
  size: number   // model size — presumably bytes on disk; TODO confirm against Ollama API docs
  digest: string // content digest identifying the exact model build
}
/**
 * Input row for questions to the local Ollama server.
 *
 * Streams the answer token-by-token (NDJSON over `POST /api/generate`),
 * auto-detects installed models via `GET /api/tags`, and renders a minimal
 * Markdown subset. Styled to match the Studio v2 glassmorphism design.
 */
export function AiPrompt() {
  const [prompt, setPrompt] = useState('')
  const [response, setResponse] = useState('')
  const [isLoading, setIsLoading] = useState(false)
  const [models, setModels] = useState<OllamaModel[]>([])
  const [selectedModel, setSelectedModel] = useState('llama3.2:latest')
  const [showResponse, setShowResponse] = useState(false)
  const textareaRef = useRef<HTMLTextAreaElement>(null)
  const abortControllerRef = useRef<AbortController | null>(null)
  const { isDark } = useTheme()

  // Resolve the Ollama base URL: when the app is served from the "macmini"
  // host, talk to the Ollama instance on that host; otherwise use localhost.
  const getOllamaBaseUrl = () => {
    if (typeof window !== 'undefined' && window.location.hostname === 'macmini') {
      return 'http://macmini:11434'
    }
    return 'http://localhost:11434'
  }

  // Load the list of locally installed models once on mount.
  useEffect(() => {
    let cancelled = false
    const loadModels = async () => {
      try {
        const res = await fetch(`${getOllamaBaseUrl()}/api/tags`)
        if (!res.ok) return
        const data = await res.json()
        // Guard against state updates after unmount.
        if (!cancelled && data.models && data.models.length > 0) {
          setModels(data.models)
          setSelectedModel(data.models[0].name)
        }
      } catch (error) {
        // Ollama not running is an expected situation, not an app error.
        console.log('Ollama nicht erreichbar:', error)
      }
    }
    loadModels()
    return () => {
      cancelled = true
      // Cancel any in-flight generation when the component unmounts.
      abortControllerRef.current?.abort()
    }
  }, [])

  // Enter sends the prompt; Shift+Enter inserts a newline.
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault()
      sendPrompt()
    }
  }

  // Grow the textarea with its content, capped at 120px (matches max-h class).
  const autoResize = () => {
    const el = textareaRef.current
    if (el) {
      el.style.height = 'auto'
      el.style.height = Math.min(el.scrollHeight, 120) + 'px'
    }
  }

  // POST the prompt to Ollama and stream the answer into component state.
  const sendPrompt = async () => {
    if (!prompt.trim() || isLoading) return

    // Abort a previous, still-running request before starting a new one.
    abortControllerRef.current?.abort()
    abortControllerRef.current = new AbortController()

    setIsLoading(true)
    setResponse('')
    setShowResponse(true)

    try {
      const res = await fetch(`${getOllamaBaseUrl()}/api/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: selectedModel,
          prompt: prompt.trim(),
          stream: true,
        }),
        signal: abortControllerRef.current.signal,
      })
      if (!res.ok) {
        throw new Error(`Ollama Fehler: ${res.status}`)
      }

      const reader = res.body?.getReader()
      if (reader) {
        // Ollama streams newline-delimited JSON. A network chunk may split
        // both multi-byte UTF-8 sequences (umlauts!) and JSON lines, so we
        // decode with { stream: true } and carry the incomplete trailing
        // line over to the next chunk instead of silently dropping it.
        const decoder = new TextDecoder()
        let fullResponse = ''
        let buffer = ''

        const consumeLine = (line: string) => {
          if (!line.trim()) return
          try {
            const data = JSON.parse(line)
            if (data.response) {
              fullResponse += data.response
              setResponse(fullResponse)
            }
          } catch {
            // Ignore a malformed line rather than aborting the whole stream.
          }
        }

        while (true) {
          const { done, value } = await reader.read()
          if (done) break
          buffer += decoder.decode(value, { stream: true })
          const lines = buffer.split('\n')
          buffer = lines.pop() ?? '' // keep incomplete tail for the next chunk
          lines.forEach(consumeLine)
        }
        buffer += decoder.decode() // flush any remaining decoder state
        consumeLine(buffer)
      }
    } catch (error) {
      if ((error as Error).name === 'AbortError') {
        setResponse('Anfrage abgebrochen.')
      } else {
        console.error('AI Prompt Fehler:', error)
        setResponse(`Fehler: ${(error as Error).message}\n\nBitte prüfen Sie, ob Ollama läuft.`)
      }
    } finally {
      setIsLoading(false)
      abortControllerRef.current = null
    }
  }

  // Minimal Markdown-to-HTML rendering for the streamed answer.
  // The model output is UNTRUSTED and ends up in dangerouslySetInnerHTML,
  // so all HTML metacharacters are escaped BEFORE any markup is generated —
  // otherwise the model (or a prompt injection) could emit live HTML/script.
  const formatResponse = (text: string) => {
    const escaped = text
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
    return escaped
      .replace(/```(\w+)?\n([\s\S]*?)```/g, `<pre class="${isDark ? 'bg-white/10' : 'bg-slate-800'} text-slate-100 p-3 rounded-lg my-2 overflow-x-auto text-sm"><code>$2</code></pre>`)
      .replace(/`([^`]+)`/g, `<code class="${isDark ? 'bg-white/20' : 'bg-slate-200'} px-1.5 py-0.5 rounded text-sm">$1</code>`)
      .replace(/\*\*([^*]+)\*\*/g, '<strong>$1</strong>')
      .replace(/\*([^*]+)\*/g, '<em>$1</em>')
      .replace(/\n/g, '<br>')
  }

  return (
    <div className={`backdrop-blur-xl border rounded-3xl p-6 mb-8 transition-all ${
      isDark
        ? 'bg-gradient-to-r from-purple-500/10 to-pink-500/10 border-purple-500/30'
        : 'bg-gradient-to-r from-purple-50 to-pink-50 border-purple-200 shadow-lg'
    }`}>
      {/* Header */}
      <div className="flex items-center gap-3 mb-4">
        <div className={`w-12 h-12 rounded-2xl flex items-center justify-center text-2xl shadow-lg ${
          isDark
            ? 'bg-gradient-to-br from-purple-500 to-pink-500'
            : 'bg-gradient-to-br from-purple-400 to-pink-400'
        }`}>
          🤖
        </div>
        <div>
          <h3 className={`font-semibold ${isDark ? 'text-white' : 'text-slate-900'}`}>KI-Assistent</h3>
          <p className={`text-xs ${isDark ? 'text-white/50' : 'text-slate-500'}`}>
            Fragen Sie Ihren lokalen Ollama-Assistenten
          </p>
        </div>
      </div>
      {/* Input */}
      <div className="flex gap-3 items-end">
        <textarea
          ref={textareaRef}
          value={prompt}
          onChange={(e) => {
            setPrompt(e.target.value)
            autoResize()
          }}
          onKeyDown={handleKeyDown}
          placeholder="Stellen Sie eine Frage... (z.B. 'Wie schreibe ich einen Elternbrief?' oder 'Erstelle mir einen Lückentext')"
          rows={1}
          className={`flex-1 min-h-[48px] max-h-[120px] px-5 py-3 rounded-2xl border resize-none transition-all ${
            isDark
              ? 'bg-white/10 border-white/20 text-white placeholder-white/40 focus:outline-none focus:ring-2 focus:ring-purple-500/50'
              : 'bg-white/80 border-slate-200 text-slate-900 placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-purple-300'
          }`}
        />
        <button
          onClick={sendPrompt}
          disabled={isLoading || !prompt.trim()}
          className={`w-12 h-12 rounded-2xl flex items-center justify-center text-white text-lg transition-all shadow-lg ${
            isLoading
              ? 'bg-slate-500 cursor-wait animate-pulse'
              : 'bg-gradient-to-br from-purple-500 to-pink-500 hover:shadow-xl hover:shadow-purple-500/30 hover:scale-105 disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:scale-100'
          }`}
        >
          {isLoading ? '⏳' : '➤'}
        </button>
      </div>
      {/* Response */}
      {showResponse && (
        <div className={`mt-4 p-5 rounded-2xl border ${
          isDark
            ? 'bg-white/5 border-white/10'
            : 'bg-white/80 border-slate-200 shadow-inner'
        }`}>
          <div className={`flex items-center gap-2 text-xs mb-3 ${isDark ? 'text-white/50' : 'text-slate-500'}`}>
            <span>🤖</span>
            <span className="font-medium">{selectedModel}</span>
            {isLoading && <span className="animate-pulse"> Generiert...</span>}
          </div>
          <div
            className={`text-sm leading-relaxed prose prose-sm max-w-none ${isDark ? 'text-white/80' : 'text-slate-700'}`}
            dangerouslySetInnerHTML={{ __html: formatResponse(response) || `<span class="${isDark ? 'text-white/40' : 'text-slate-400'} italic">Warte auf Antwort...</span>` }}
          />
        </div>
      )}
      {/* Model Selector */}
      <div className={`flex items-center gap-2 mt-4 pt-4 border-t ${isDark ? 'border-white/10' : 'border-slate-200'}`}>
        <span className={`text-xs ${isDark ? 'text-white/50' : 'text-slate-500'}`}>Modell:</span>
        <select
          value={selectedModel}
          onChange={(e) => setSelectedModel(e.target.value)}
          className={`text-xs px-3 py-1.5 rounded-xl border cursor-pointer transition-all ${
            isDark
              ? 'bg-white/10 border-white/20 text-white focus:outline-none focus:ring-2 focus:ring-purple-500/50'
              : 'bg-white border-slate-200 text-slate-700 focus:outline-none focus:ring-2 focus:ring-purple-300'
          }`}
        >
          {models.length > 0 ? (
            models.map((model) => (
              <option key={model.name} value={model.name}>
                {model.name}
              </option>
            ))
          ) : (
            <>
              <option value="llama3.2:latest">Llama 3.2</option>
              <option value="mistral:latest">Mistral</option>
              <option value="qwen2.5:7b">Qwen 2.5</option>
            </>
          )}
        </select>
        {models.length === 0 && (
          <span className={`text-xs ${isDark ? 'text-amber-400' : 'text-amber-600'}`}>
            Ollama nicht verbunden
          </span>
        )}
      </div>
    </div>
  )
}
export default AiPrompt