A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
338 lines
10 KiB
TypeScript
'use client'
|
|
|
|
import { useState, useEffect, useRef, useCallback } from 'react'
|
|
import { VoiceAPI, VoiceMessage, VoiceTask } from '@/lib/voice/voice-api'
|
|
import { VoiceIndicator } from './VoiceIndicator'
|
|
|
|
interface Message {
|
|
id: string
|
|
role: 'user' | 'assistant'
|
|
content: string
|
|
timestamp: Date
|
|
intent?: string
|
|
task?: VoiceTask
|
|
}
|
|
|
|
interface VoiceCommandBarProps {
|
|
onTaskCreated?: (task: VoiceTask) => void
|
|
onTaskApproved?: (taskId: string) => void
|
|
className?: string
|
|
}
|
|
|
|
/**
|
|
* Full voice command bar with conversation history
|
|
* Shows transcript, responses, and pending tasks
|
|
*/
|
|
export function VoiceCommandBar({
|
|
onTaskCreated,
|
|
onTaskApproved,
|
|
className = '',
|
|
}: VoiceCommandBarProps) {
|
|
const voiceApiRef = useRef<VoiceAPI | null>(null)
|
|
const messagesEndRef = useRef<HTMLDivElement>(null)
|
|
|
|
const [isInitialized, setIsInitialized] = useState(false)
|
|
const [isConnected, setIsConnected] = useState(false)
|
|
const [isListening, setIsListening] = useState(false)
|
|
const [status, setStatus] = useState<string>('idle')
|
|
const [messages, setMessages] = useState<Message[]>([])
|
|
const [pendingTasks, setPendingTasks] = useState<VoiceTask[]>([])
|
|
const [error, setError] = useState<string | null>(null)
|
|
|
|
// Auto-scroll to bottom
|
|
useEffect(() => {
|
|
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
|
|
}, [messages])
|
|
|
|
// Initialize voice API
|
|
useEffect(() => {
|
|
const init = async () => {
|
|
try {
|
|
const api = new VoiceAPI()
|
|
await api.initialize()
|
|
voiceApiRef.current = api
|
|
|
|
api.setOnMessage(handleMessage)
|
|
api.setOnError(handleError)
|
|
api.setOnStatusChange(handleStatusChange)
|
|
|
|
setIsInitialized(true)
|
|
} catch (e) {
|
|
console.error('Failed to initialize:', e)
|
|
setError('Sprachdienst konnte nicht initialisiert werden')
|
|
}
|
|
}
|
|
|
|
init()
|
|
|
|
return () => {
|
|
voiceApiRef.current?.disconnect()
|
|
}
|
|
}, [])
|
|
|
|
const handleMessage = useCallback(
|
|
(message: VoiceMessage) => {
|
|
switch (message.type) {
|
|
case 'transcript':
|
|
if (message.final) {
|
|
setMessages((prev) => [
|
|
...prev,
|
|
{
|
|
id: `msg-${Date.now()}`,
|
|
role: 'user',
|
|
content: message.text,
|
|
timestamp: new Date(),
|
|
},
|
|
])
|
|
}
|
|
break
|
|
|
|
case 'intent':
|
|
// Update last user message with intent
|
|
setMessages((prev) => {
|
|
const updated = [...prev]
|
|
const lastUserMsg = [...updated].reverse().find((m) => m.role === 'user')
|
|
if (lastUserMsg) {
|
|
lastUserMsg.intent = message.intent
|
|
}
|
|
return updated
|
|
})
|
|
break
|
|
|
|
case 'response':
|
|
setMessages((prev) => [
|
|
...prev,
|
|
{
|
|
id: `msg-${Date.now()}`,
|
|
role: 'assistant',
|
|
content: message.text,
|
|
timestamp: new Date(),
|
|
},
|
|
])
|
|
break
|
|
|
|
case 'task_created':
|
|
const task: VoiceTask = {
|
|
id: message.task_id,
|
|
session_id: '',
|
|
type: message.task_type,
|
|
state: message.state,
|
|
created_at: new Date().toISOString(),
|
|
updated_at: new Date().toISOString(),
|
|
result_available: false,
|
|
}
|
|
setPendingTasks((prev) => [...prev, task])
|
|
onTaskCreated?.(task)
|
|
|
|
// Update last assistant message with task
|
|
setMessages((prev) => {
|
|
const updated = [...prev]
|
|
const lastAssistantMsg = [...updated].reverse().find((m) => m.role === 'assistant')
|
|
if (lastAssistantMsg) {
|
|
lastAssistantMsg.task = task
|
|
}
|
|
return updated
|
|
})
|
|
break
|
|
|
|
case 'error':
|
|
setError(message.message)
|
|
break
|
|
}
|
|
},
|
|
[onTaskCreated]
|
|
)
|
|
|
|
const handleError = useCallback((error: Error) => {
|
|
setError(error.message)
|
|
setIsListening(false)
|
|
}, [])
|
|
|
|
const handleStatusChange = useCallback((newStatus: string) => {
|
|
setStatus(newStatus)
|
|
setIsConnected(newStatus !== 'idle' && newStatus !== 'disconnected')
|
|
setIsListening(newStatus === 'listening')
|
|
}, [])
|
|
|
|
const toggleListening = async () => {
|
|
if (!voiceApiRef.current) return
|
|
|
|
try {
|
|
setError(null)
|
|
|
|
if (isListening) {
|
|
voiceApiRef.current.stopCapture()
|
|
} else {
|
|
if (!isConnected) {
|
|
await voiceApiRef.current.connect()
|
|
}
|
|
await voiceApiRef.current.startCapture()
|
|
}
|
|
} catch (e) {
|
|
console.error('Failed to toggle listening:', e)
|
|
setError('Mikrofon konnte nicht aktiviert werden')
|
|
}
|
|
}
|
|
|
|
const approveTask = async (taskId: string) => {
|
|
try {
|
|
await voiceApiRef.current?.approveTask(taskId)
|
|
setPendingTasks((prev) => prev.filter((t) => t.id !== taskId))
|
|
onTaskApproved?.(taskId)
|
|
} catch (e) {
|
|
console.error('Failed to approve task:', e)
|
|
}
|
|
}
|
|
|
|
const rejectTask = async (taskId: string) => {
|
|
try {
|
|
await voiceApiRef.current?.rejectTask(taskId)
|
|
setPendingTasks((prev) => prev.filter((t) => t.id !== taskId))
|
|
} catch (e) {
|
|
console.error('Failed to reject task:', e)
|
|
}
|
|
}
|
|
|
|
if (!isInitialized) {
|
|
return (
|
|
<div className={`flex items-center justify-center p-8 ${className}`}>
|
|
<div className="animate-spin w-8 h-8 border-2 border-gray-300 border-t-blue-500 rounded-full" />
|
|
</div>
|
|
)
|
|
}
|
|
|
|
return (
|
|
<div
|
|
className={`flex flex-col bg-white rounded-xl shadow-lg overflow-hidden ${className}`}
|
|
>
|
|
{/* Header */}
|
|
<div className="flex items-center justify-between p-4 bg-gray-50 border-b">
|
|
<div className="flex items-center gap-3">
|
|
<svg
|
|
xmlns="http://www.w3.org/2000/svg"
|
|
viewBox="0 0 24 24"
|
|
fill="currentColor"
|
|
className="w-6 h-6 text-blue-500"
|
|
>
|
|
<path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3z" />
|
|
<path d="M17 11c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z" />
|
|
</svg>
|
|
<span className="font-medium text-gray-800">Breakpilot Voice</span>
|
|
</div>
|
|
<VoiceIndicator isListening={isListening} status={status} />
|
|
</div>
|
|
|
|
{/* Messages */}
|
|
<div className="flex-1 overflow-y-auto p-4 space-y-4 min-h-[200px] max-h-[400px]">
|
|
{messages.length === 0 ? (
|
|
<div className="text-center text-gray-400 py-8">
|
|
<p className="mb-2">Willkommen bei Breakpilot Voice!</p>
|
|
<p className="text-sm">
|
|
Klicken Sie auf das Mikrofon und sprechen Sie Ihren Befehl.
|
|
</p>
|
|
</div>
|
|
) : (
|
|
messages.map((msg) => (
|
|
<div
|
|
key={msg.id}
|
|
className={`flex ${msg.role === 'user' ? 'justify-end' : 'justify-start'}`}
|
|
>
|
|
<div
|
|
className={`max-w-[80%] rounded-lg px-4 py-2 ${
|
|
msg.role === 'user'
|
|
? 'bg-blue-500 text-white'
|
|
: 'bg-gray-100 text-gray-800'
|
|
}`}
|
|
>
|
|
<p>{msg.content}</p>
|
|
{msg.intent && (
|
|
<p className="text-xs mt-1 opacity-70">
|
|
Intent: {msg.intent}
|
|
</p>
|
|
)}
|
|
{msg.task && msg.task.state === 'ready' && (
|
|
<div className="flex gap-2 mt-2">
|
|
<button
|
|
onClick={() => approveTask(msg.task!.id)}
|
|
className="px-2 py-1 text-xs bg-green-500 text-white rounded hover:bg-green-600"
|
|
>
|
|
Bestaetigen
|
|
</button>
|
|
<button
|
|
onClick={() => rejectTask(msg.task!.id)}
|
|
className="px-2 py-1 text-xs bg-gray-300 text-gray-700 rounded hover:bg-gray-400"
|
|
>
|
|
Abbrechen
|
|
</button>
|
|
</div>
|
|
)}
|
|
</div>
|
|
</div>
|
|
))
|
|
)}
|
|
<div ref={messagesEndRef} />
|
|
</div>
|
|
|
|
{/* Error */}
|
|
{error && (
|
|
<div className="px-4 py-2 bg-red-50 border-t border-red-200 text-red-700 text-sm">
|
|
{error}
|
|
</div>
|
|
)}
|
|
|
|
{/* Input area */}
|
|
<div className="p-4 bg-gray-50 border-t">
|
|
<div className="flex items-center gap-4">
|
|
{/* Microphone button */}
|
|
<button
|
|
onClick={toggleListening}
|
|
disabled={status === 'processing'}
|
|
className={`
|
|
w-12 h-12 rounded-full flex items-center justify-center
|
|
transition-all duration-200 focus:outline-none focus:ring-4
|
|
${
|
|
isListening
|
|
? 'bg-red-500 hover:bg-red-600 focus:ring-red-200'
|
|
: 'bg-blue-500 hover:bg-blue-600 focus:ring-blue-200'
|
|
}
|
|
${status === 'processing' ? 'opacity-50 cursor-not-allowed' : ''}
|
|
`}
|
|
>
|
|
<svg
|
|
xmlns="http://www.w3.org/2000/svg"
|
|
viewBox="0 0 24 24"
|
|
fill="white"
|
|
className="w-6 h-6"
|
|
>
|
|
{isListening ? (
|
|
<path d="M6 6h12v12H6z" />
|
|
) : (
|
|
<path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3zm-1 1.93c-3.94-.49-7-3.85-7-7.93h2c0 2.76 2.24 5 5 5s5-2.24 5-5h2c0 4.08-3.06 7.44-7 7.93V18h4v2H8v-2h4v-2.07z" />
|
|
)}
|
|
</svg>
|
|
</button>
|
|
|
|
{/* Text hint */}
|
|
<div className="flex-1 text-sm text-gray-500">
|
|
{isListening
|
|
? 'Ich hoere zu... Sprechen Sie jetzt.'
|
|
: status === 'processing'
|
|
? 'Verarbeite...'
|
|
: 'Tippen Sie auf das Mikrofon um zu sprechen'}
|
|
</div>
|
|
|
|
{/* Pending tasks indicator */}
|
|
{pendingTasks.length > 0 && (
|
|
<div className="flex items-center gap-2">
|
|
<span className="text-xs text-gray-500">
|
|
{pendingTasks.length} Aufgabe(n)
|
|
</span>
|
|
<span className="w-2 h-2 bg-yellow-400 rounded-full animate-pulse" />
|
|
</div>
|
|
)}
|
|
</div>
|
|
</div>
|
|
</div>
|
|
)
|
|
}
|