Files
breakpilot-lehrer/studio-v2/components/voice/VoiceCapture.tsx
Benjamin Boenisch 5a31f52310 Initial commit: breakpilot-lehrer - Lehrer KI Platform
Services: Admin-Lehrer, Backend-Lehrer, Studio v2, Website,
Klausur-Service, School-Service, Voice-Service, Geo-Service,
BreakPilot Drive, Agent-Core

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 23:47:26 +01:00

245 lines
6.9 KiB
TypeScript

'use client'
import { useEffect, useRef, useState, useCallback } from 'react'
import { VoiceAPI, VoiceMessage, VoiceTask } from '@/lib/voice/voice-api'
import { VoiceIndicator } from './VoiceIndicator'
/** Props for the VoiceCapture component. All callbacks are optional. */
interface VoiceCaptureProps {
  /** Fired for each transcript update; `isFinal` marks the finalized text. */
  onTranscript?: (text: string, isFinal: boolean) => void
  /** Fired when the backend recognizes an intent with its parameters. */
  onIntent?: (intent: string, parameters: Record<string, unknown>) => void
  /** Fired when the backend returns a spoken/text response. */
  onResponse?: (text: string) => void
  /** Fired when the backend reports a newly created background task. */
  onTaskCreated?: (task: VoiceTask) => void
  /** Fired on capture/connection errors (also shown inline in the UI). */
  onError?: (error: Error) => void
  /** Extra CSS classes applied to the root element. */
  className?: string
}
/**
 * Voice capture component with microphone button.
 *
 * Owns one VoiceAPI instance per mounted component: initializes it on
 * mount, wires message/error/status handlers, streams audio over its
 * WebSocket connection, and disconnects on unmount. Transcripts, intents,
 * responses, and created tasks are surfaced via the callback props.
 */
export function VoiceCapture({
  onTranscript,
  onIntent,
  onResponse,
  onTaskCreated,
  onError,
  className = '',
}: VoiceCaptureProps) {
  const voiceApiRef = useRef<VoiceAPI | null>(null)
  const [isInitialized, setIsInitialized] = useState(false)
  const [isConnected, setIsConnected] = useState(false)
  const [isListening, setIsListening] = useState(false)
  const [status, setStatus] = useState<string>('idle')
  // NOTE(review): audioLevel is never updated anywhere in this component —
  // presumably VoiceAPI is meant to report levels for VoiceIndicator;
  // confirm and wire it up, or drop the state.
  const [audioLevel, setAudioLevel] = useState(0)
  const [transcript, setTranscript] = useState<string>('')
  const [error, setError] = useState<string | null>(null)

  /** Routes a server message to the matching callback prop. */
  const handleMessage = useCallback(
    (message: VoiceMessage) => {
      switch (message.type) {
        case 'transcript':
          setTranscript(message.text)
          onTranscript?.(message.text, message.final)
          break
        case 'intent':
          onIntent?.(message.intent, message.parameters)
          break
        case 'response':
          onResponse?.(message.text)
          break
        case 'task_created':
          // Synthesize a VoiceTask from the event. session_id is not part
          // of the message payload, so it is left empty for the caller.
          onTaskCreated?.({
            id: message.task_id,
            session_id: '',
            type: message.task_type,
            state: message.state,
            created_at: new Date().toISOString(),
            updated_at: new Date().toISOString(),
            result_available: false,
          })
          break
        case 'error':
          setError(message.message)
          onError?.(new Error(message.message))
          break
      }
    },
    [onTranscript, onIntent, onResponse, onTaskCreated, onError]
  )

  /** Shows the error inline, stops the listening UI, and notifies the parent. */
  const handleError = useCallback(
    (error: Error) => {
      setError(error.message)
      setIsListening(false)
      onError?.(error)
    },
    [onError]
  )

  /** Mirrors the API's connection lifecycle into local UI state. */
  const handleStatusChange = useCallback((newStatus: string) => {
    setStatus(newStatus)
    if (newStatus === 'connected') {
      setIsConnected(true)
    } else if (newStatus === 'disconnected') {
      setIsConnected(false)
      setIsListening(false)
    } else if (newStatus === 'listening') {
      setIsListening(true)
    } else if (newStatus === 'processing') {
      setIsListening(false)
    }
  }, [])

  // BUG FIX: the original registered the first render's handlers in a
  // one-time mount effect, so later callback props (onTranscript, ...)
  // were never seen by the API (stale closures). Keep the latest handlers
  // in a ref and register stable delegating functions instead.
  const handlersRef = useRef({ handleMessage, handleError, handleStatusChange })
  useEffect(() => {
    handlersRef.current = { handleMessage, handleError, handleStatusChange }
  }, [handleMessage, handleError, handleStatusChange])

  // Initialize the voice API once per mount; disconnect on unmount.
  useEffect(() => {
    let cancelled = false
    const init = async () => {
      try {
        const api = new VoiceAPI()
        await api.initialize()
        // BUG FIX: guard against the component unmounting (or React strict
        // mode re-running the effect) while initialize() was in flight —
        // otherwise the resolved API leaked and state was set after unmount.
        if (cancelled) {
          api.disconnect()
          return
        }
        voiceApiRef.current = api
        api.setOnMessage((m) => handlersRef.current.handleMessage(m))
        api.setOnError((e) => handlersRef.current.handleError(e))
        api.setOnStatusChange((s) => handlersRef.current.handleStatusChange(s))
        setIsInitialized(true)
      } catch (e) {
        console.error('Failed to initialize voice API:', e)
        if (!cancelled) {
          setError('Sprachdienst konnte nicht initialisiert werden')
        }
      }
    }
    void init()
    return () => {
      cancelled = true
      voiceApiRef.current?.disconnect()
      voiceApiRef.current = null
    }
  }, [])

  /** Starts capture (connecting first if needed) or stops it when active. */
  const toggleListening = async () => {
    if (!voiceApiRef.current) return
    try {
      setError(null)
      if (isListening) {
        // Stop listening
        voiceApiRef.current.stopCapture()
        setIsListening(false)
      } else {
        // Start listening; connect lazily on first use
        if (!isConnected) {
          await voiceApiRef.current.connect()
        }
        await voiceApiRef.current.startCapture()
        setIsListening(true)
      }
    } catch (e) {
      console.error('Failed to toggle listening:', e)
      setError('Mikrofon konnte nicht aktiviert werden')
    }
  }

  /** Interrupts an in-progress spoken response. */
  const interrupt = () => {
    voiceApiRef.current?.interrupt()
  }

  // Initialization spinner until the VoiceAPI is ready.
  if (!isInitialized) {
    return (
      <div className={`flex items-center gap-2 ${className}`}>
        <div className="animate-spin w-6 h-6 border-2 border-gray-300 border-t-blue-500 rounded-full" />
        <span className="text-sm text-gray-500">Initialisiere...</span>
      </div>
    )
  }

  return (
    <div className={`flex flex-col gap-4 ${className}`}>
      {/* Error display */}
      {error && (
        <div className="bg-red-50 border border-red-200 text-red-700 px-4 py-2 rounded-lg text-sm">
          {error}
        </div>
      )}
      {/* Main controls */}
      <div className="flex items-center gap-4">
        {/* Microphone button */}
        <button
          onClick={toggleListening}
          disabled={status === 'processing'}
          className={`
            relative w-16 h-16 rounded-full flex items-center justify-center
            transition-all duration-200 focus:outline-none focus:ring-4
            ${
              isListening
                ? 'bg-red-500 hover:bg-red-600 focus:ring-red-200'
                : 'bg-blue-500 hover:bg-blue-600 focus:ring-blue-200'
            }
            ${status === 'processing' ? 'opacity-50 cursor-not-allowed' : ''}
          `}
        >
          {/* Microphone icon */}
          <svg
            xmlns="http://www.w3.org/2000/svg"
            viewBox="0 0 24 24"
            fill="white"
            className="w-8 h-8"
          >
            {isListening ? (
              // Stop icon
              <path d="M6 6h12v12H6z" />
            ) : (
              // Microphone icon
              <path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3zm-1 1.93c-3.94-.49-7-3.85-7-7.93h2c0 2.76 2.24 5 5 5s5-2.24 5-5h2c0 4.08-3.06 7.44-7 7.93V18h4v2H8v-2h4v-2.07z" />
            )}
          </svg>
          {/* Pulsing ring when listening */}
          {isListening && (
            <span className="absolute inset-0 rounded-full animate-ping bg-red-400 opacity-25" />
          )}
        </button>
        {/* Status indicator */}
        <VoiceIndicator
          isListening={isListening}
          audioLevel={audioLevel}
          status={status}
        />
        {/* Interrupt button (when responding) */}
        {status === 'responding' && (
          <button
            onClick={interrupt}
            className="px-4 py-2 text-sm bg-gray-200 hover:bg-gray-300 rounded-lg"
          >
            Unterbrechen
          </button>
        )}
      </div>
      {/* Transcript display */}
      {transcript && (
        <div className="bg-gray-50 border border-gray-200 rounded-lg p-4">
          <p className="text-sm text-gray-500 mb-1">Erkannt:</p>
          <p className="text-gray-800">{transcript}</p>
        </div>
      )}
      {/* Instructions */}
      <p className="text-xs text-gray-400">
        {isListening
          ? 'Sprechen Sie jetzt... Klicken Sie erneut zum Beenden.'
          : 'Klicken Sie auf das Mikrofon und sprechen Sie Ihren Befehl.'}
      </p>
    </div>
  )
}