Files
breakpilot-lehrer/admin-lehrer/app/(admin)/ai/magic-help/useMagicHelp.ts
Benjamin Admin 9ba420fa91
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 42s
CI / test-go-edu-search (push) Successful in 34s
CI / test-python-klausur (push) Failing after 2m51s
CI / test-python-agent-core (push) Successful in 21s
CI / test-nodejs-website (push) Successful in 29s
Fix: Remove broken getKlausurApiUrl and clean up empty lines
The sed replacement left orphaned hostname references in the story page
and empty lines in the getApiBase functions.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-24 16:02:04 +02:00

383 lines
11 KiB
TypeScript

'use client'
import { useState, useEffect, useCallback, useRef } from 'react'
import {
type TabId,
type TrOCRStatus,
type OCRResult,
type TrainingExample,
type MagicSettings,
DEFAULT_SETTINGS,
API_BASE,
} from './types'
/**
 * Plays a short 800 Hz sine "ding" (~200 ms) as success feedback.
 *
 * A throwaway AudioContext is created per call and closed once the
 * oscillator finishes: browsers cap the number of concurrently open
 * AudioContexts (e.g. ~6 per tab in Chrome), so leaking one per call
 * would eventually make the chime fail silently after repeated OCR
 * successes. Silently no-ops where the Web Audio API is unavailable.
 */
function playSuccessSound() {
  try {
    const audioContext = new (window.AudioContext || (window as unknown as { webkitAudioContext: typeof AudioContext }).webkitAudioContext)()
    const oscillator = audioContext.createOscillator()
    const gainNode = audioContext.createGain()
    oscillator.connect(gainNode)
    gainNode.connect(audioContext.destination)
    oscillator.frequency.value = 800
    oscillator.type = 'sine'
    // Exponential fade-out avoids an audible click when the tone stops.
    gainNode.gain.setValueAtTime(0.1, audioContext.currentTime)
    gainNode.gain.exponentialRampToValueAtTime(0.01, audioContext.currentTime + 0.2)
    oscillator.start(audioContext.currentTime)
    oscillator.stop(audioContext.currentTime + 0.2)
    // Release the context once playback ends; otherwise each call leaks one.
    oscillator.onended = () => { void audioContext.close() }
  } catch {
    // Audio not supported, ignore
  }
}
/**
 * State + behavior hook for the "Magic Help" TrOCR admin page.
 *
 * Owns: tab navigation, backend/model status, OCR test uploads
 * (global drag & drop, clipboard paste, keyboard shortcuts, debounced
 * live OCR), training-example management, fine-tune triggering, and
 * UI settings persisted to localStorage.
 *
 * All network calls go to `${API_BASE}/api/klausur/trocr/*`; responses
 * are consumed as loosely-typed JSON — assumes the backend contract
 * matches the shapes in ./types (TODO confirm against the API).
 */
export function useMagicHelp() {
  // --- Core UI state ---
  const [activeTab, setActiveTab] = useState<TabId>('overview')
  const [status, setStatus] = useState<TrOCRStatus | null>(null)
  const [loading, setLoading] = useState(true)
  // --- OCR test state ---
  const [ocrResult, setOcrResult] = useState<OCRResult | null>(null)
  const [ocrLoading, setOcrLoading] = useState(false)
  // --- Training state ---
  const [examples, setExamples] = useState<TrainingExample[]>([])
  const [trainingImage, setTrainingImage] = useState<File | null>(null)
  const [trainingText, setTrainingText] = useState('')
  const [fineTuning, setFineTuning] = useState(false)
  // --- Settings (persisted to localStorage under 'magic-help-settings') ---
  const [settings, setSettings] = useState<MagicSettings>(DEFAULT_SETTINGS)
  const [settingsSaved, setSettingsSaved] = useState(false)
  // Phase 1: New state for enhanced features
  const [globalDragActive, setGlobalDragActive] = useState(false)
  const [uploadedImage, setUploadedImage] = useState<File | null>(null)
  const [imagePreview, setImagePreview] = useState<string | null>(null)
  const [showShortcutHint, setShowShortcutHint] = useState(false)
  const [showHeatmap, setShowHeatmap] = useState(false)
  const [showTrainingDashboard, setShowTrainingDashboard] = useState(false)
  // Debounce handle for live OCR. NOTE(review): never cleared on unmount,
  // so a pending timer can still fire after the component is gone — verify
  // whether an unmount cleanup effect should clear it.
  const debounceTimer = useRef<NodeJS.Timeout | null>(null)
  // Counts nested dragenter/dragleave pairs so the drop overlay only hides
  // once the pointer truly leaves the document.
  const dragCounter = useRef(0)

  /** Fetches backend/model status; on failure stores an error status object. */
  const fetchStatus = useCallback(async () => {
    try {
      const res = await fetch(`${API_BASE}/api/klausur/trocr/status`)
      const data = await res.json()
      setStatus(data)
    } catch {
      setStatus({ status: 'error', error: 'Failed to fetch status' })
    } finally {
      setLoading(false)
    }
  }, [])

  /** Loads stored training examples; errors are logged, list left unchanged. */
  const fetchExamples = useCallback(async () => {
    try {
      const res = await fetch(`${API_BASE}/api/klausur/trocr/training/examples`)
      const data = await res.json()
      setExamples(data.examples || [])
    } catch (error) {
      console.error('Failed to fetch examples:', error)
    }
  }, [])

  // Phase 1: Live OCR with debounce
  /**
   * Uploads `file` to the extract endpoint and stores the result.
   * Failures (HTTP `detail` or a thrown error) are surfaced as a
   * pseudo-result with confidence 0 rather than thrown. Plays the
   * success chime for confident results (> 0.7) when sound is enabled.
   */
  const triggerOCR = useCallback(async (file: File) => {
    setOcrLoading(true)
    setOcrResult(null)
    const formData = new FormData()
    formData.append('file', file)
    try {
      const res = await fetch(`${API_BASE}/api/klausur/trocr/extract?detect_lines=${settings.autoDetectLines}`, {
        method: 'POST',
        body: formData,
      })
      const data = await res.json()
      if (data.text !== undefined) {
        setOcrResult(data)
        if (settings.soundFeedback && data.confidence > 0.7) {
          playSuccessSound()
        }
      } else {
        setOcrResult({ text: `Error: ${data.detail || 'Unknown error'}`, confidence: 0, processing_time_ms: 0, model: '', has_lora_adapter: false })
      }
    } catch (error) {
      setOcrResult({ text: `Error: ${error}`, confidence: 0, processing_time_ms: 0, model: '', has_lora_adapter: false })
    } finally {
      setOcrLoading(false)
    }
  }, [settings.autoDetectLines, settings.soundFeedback])

  // Handle file upload with live preview
  /**
   * Accepts an image file (non-images are silently ignored), shows a
   * preview, switches to the test tab, and — when live preview is on —
   * schedules a debounced (500 ms) OCR run, replacing any pending one.
   */
  const handleFileUpload = useCallback((file: File) => {
    if (!file.type.startsWith('image/')) return
    setUploadedImage(file)
    const previewUrl = URL.createObjectURL(file)
    setImagePreview(previewUrl)
    setActiveTab('test')
    if (settings.livePreview) {
      if (debounceTimer.current) {
        clearTimeout(debounceTimer.current)
      }
      debounceTimer.current = setTimeout(() => {
        triggerOCR(file)
      }, 500)
    }
  }, [settings.livePreview, triggerOCR])

  /** Runs OCR immediately on the current upload (Ctrl+Enter / button). */
  const handleManualOCR = () => {
    if (uploadedImage) {
      triggerOCR(uploadedImage)
    }
  }

  // Phase 1: Global Drag & Drop handler — lets the user drop an image
  // anywhere on the document, not just on a dedicated dropzone.
  useEffect(() => {
    const handleDragEnter = (e: DragEvent) => {
      e.preventDefault()
      e.stopPropagation()
      dragCounter.current++
      if (e.dataTransfer?.types.includes('Files')) {
        setGlobalDragActive(true)
      }
    }
    const handleDragLeave = (e: DragEvent) => {
      e.preventDefault()
      e.stopPropagation()
      dragCounter.current--
      // Only deactivate once every nested enter has a matching leave.
      if (dragCounter.current === 0) {
        setGlobalDragActive(false)
      }
    }
    const handleDragOver = (e: DragEvent) => {
      // preventDefault here is required for the browser to allow a drop.
      e.preventDefault()
      e.stopPropagation()
    }
    const handleDrop = (e: DragEvent) => {
      e.preventDefault()
      e.stopPropagation()
      dragCounter.current = 0
      setGlobalDragActive(false)
      // Only the first dropped file is used.
      const file = e.dataTransfer?.files[0]
      if (file?.type.startsWith('image/')) {
        handleFileUpload(file)
      }
    }
    document.addEventListener('dragenter', handleDragEnter)
    document.addEventListener('dragleave', handleDragLeave)
    document.addEventListener('dragover', handleDragOver)
    document.addEventListener('drop', handleDrop)
    return () => {
      document.removeEventListener('dragenter', handleDragEnter)
      document.removeEventListener('dragleave', handleDragLeave)
      document.removeEventListener('dragover', handleDragOver)
      document.removeEventListener('drop', handleDrop)
    }
  }, [handleFileUpload])

  // Phase 1: Clipboard paste handler (Ctrl+V) — first image item wins.
  useEffect(() => {
    const handlePaste = async (e: ClipboardEvent) => {
      const items = e.clipboardData?.items
      if (!items) return
      for (const item of items) {
        if (item.type.startsWith('image/')) {
          e.preventDefault()
          const file = item.getAsFile()
          if (file) {
            handleFileUpload(file)
          }
          break
        }
      }
    }
    document.addEventListener('paste', handlePaste)
    return () => document.removeEventListener('paste', handlePaste)
  }, [handleFileUpload])

  // Phase 1: Keyboard shortcuts:
  //   Ctrl+Enter → run OCR on current upload, Alt+1..6 → switch tab,
  //   Escape → clear upload/result, '?' → toggle the shortcut hint.
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (e.ctrlKey && e.key === 'Enter' && uploadedImage) {
        e.preventDefault()
        handleManualOCR()
      }
      if (e.key >= '1' && e.key <= '6' && e.altKey) {
        e.preventDefault()
        const tabIndex = parseInt(e.key) - 1
        const tabIds: TabId[] = ['overview', 'test', 'batch', 'training', 'architecture', 'settings']
        if (tabIds[tabIndex]) {
          setActiveTab(tabIds[tabIndex])
        }
      }
      if (e.key === 'Escape' && uploadedImage) {
        setUploadedImage(null)
        setImagePreview(null)
        setOcrResult(null)
      }
      if (e.key === '?') {
        setShowShortcutHint(prev => !prev)
      }
    }
    document.addEventListener('keydown', handleKeyDown)
    return () => document.removeEventListener('keydown', handleKeyDown)
    // NOTE(review): deps deliberately omit handleManualOCR, so the handler
    // may close over stale settings until uploadedImage changes — confirm
    // this is intentional.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [uploadedImage])

  // Initial data load + settings from localStorage
  useEffect(() => {
    fetchStatus()
    fetchExamples()
    const saved = localStorage.getItem('magic-help-settings')
    if (saved) {
      try {
        // Merge over defaults so settings added later keep their default value.
        setSettings({ ...DEFAULT_SETTINGS, ...JSON.parse(saved) })
      } catch {
        // ignore parse errors
      }
    }
  }, [fetchStatus, fetchExamples])

  // Cleanup preview URL: revoke the previous object URL whenever the
  // preview changes (or on unmount) to avoid leaking blob URLs.
  useEffect(() => {
    return () => {
      if (imagePreview) {
        URL.revokeObjectURL(imagePreview)
      }
    }
  }, [imagePreview])

  /**
   * Uploads the (image, ground-truth text) pair as a training example.
   * Requires both inputs; on success clears the form and refreshes
   * status + example list. Uses blocking alert() dialogs for feedback.
   */
  const handleAddTrainingExample = async () => {
    if (!trainingImage || !trainingText.trim()) {
      alert('Please provide both an image and the correct text')
      return
    }
    const formData = new FormData()
    formData.append('file', trainingImage)
    try {
      const res = await fetch(`${API_BASE}/api/klausur/trocr/training/add?ground_truth=${encodeURIComponent(trainingText)}`, {
        method: 'POST',
        body: formData,
      })
      const data = await res.json()
      if (data.example_id) {
        alert(`Training example added! Total: ${data.total_examples}`)
        setTrainingImage(null)
        setTrainingText('')
        fetchStatus()
        fetchExamples()
      } else {
        alert(`Error: ${data.detail || 'Unknown error'}`)
      }
    } catch (error) {
      alert(`Error: ${error}`)
    }
  }

  /**
   * Kicks off server-side fine-tuning with the configured hyperparameters
   * (epochs, learning rate, LoRA rank/alpha) after a confirm() prompt.
   * The request blocks until training finishes — may take minutes.
   */
  const handleFineTune = async () => {
    if (!confirm('Start fine-tuning? This may take several minutes.')) return
    setFineTuning(true)
    try {
      const res = await fetch(`${API_BASE}/api/klausur/trocr/training/fine-tune`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          epochs: settings.epochs,
          learning_rate: settings.learningRate,
          lora_rank: settings.loraRank,
          lora_alpha: settings.loraAlpha,
        }),
      })
      const data = await res.json()
      if (data.status === 'success') {
        alert(`Fine-tuning successful!\nExamples used: ${data.examples_used}\nEpochs: ${data.epochs}`)
        fetchStatus()
      } else {
        alert(`Fine-tuning failed: ${data.message}`)
      }
    } catch (error) {
      alert(`Error: ${error}`)
    } finally {
      setFineTuning(false)
    }
  }

  /** Persists settings to localStorage and flashes a "saved" indicator for 2 s. */
  const saveSettings = () => {
    localStorage.setItem('magic-help-settings', JSON.stringify(settings))
    setSettingsSaved(true)
    setTimeout(() => setSettingsSaved(false), 2000)
  }

  /** Resets the current upload, preview, and OCR result. */
  const clearUploadedImage = () => {
    setUploadedImage(null)
    setImagePreview(null)
    setOcrResult(null)
  }

  /** Carries the current image + OCR text into the training tab for correction. */
  const sendToTraining = () => {
    if (uploadedImage && ocrResult) {
      setTrainingImage(uploadedImage)
      setTrainingText(ocrResult.text)
      setActiveTab('training')
    }
  }

  return {
    // State
    activeTab,
    setActiveTab,
    status,
    loading,
    ocrResult,
    ocrLoading,
    examples,
    trainingImage,
    setTrainingImage,
    trainingText,
    setTrainingText,
    fineTuning,
    settings,
    setSettings,
    settingsSaved,
    globalDragActive,
    uploadedImage,
    imagePreview,
    showShortcutHint,
    setShowShortcutHint,
    showHeatmap,
    setShowHeatmap,
    showTrainingDashboard,
    setShowTrainingDashboard,
    // Actions
    fetchStatus,
    handleFileUpload,
    handleManualOCR,
    handleAddTrainingExample,
    handleFineTune,
    saveSettings,
    clearUploadedImage,
    sendToTraining,
  }
}
/** Derived return shape of useMagicHelp, for typing consumer component props. */
export type UseMagicHelpReturn = ReturnType<typeof useMagicHelp>