A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
136 lines
4.0 KiB
Python
"""
|
|
OpenAI-kompatible Chat Completion Models.
|
|
|
|
Basiert auf OpenAI API Spezifikation:
|
|
https://platform.openai.com/docs/api-reference/chat/create
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
from typing import Optional, Literal, Any, Union, List, Dict
|
|
from pydantic import BaseModel, Field
|
|
import time
|
|
import uuid
|
|
|
|
|
|
class FunctionCall(BaseModel):
    """A function invocation requested by the model.

    ``arguments`` is the raw JSON-encoded argument string exactly as the
    OpenAI wire format delivers it; callers must parse it themselves.
    """

    name: str
    # JSON string, deliberately not a parsed dict -- mirrors the OpenAI API.
    arguments: str
class ToolCall(BaseModel):
    """A tool call emitted by the model."""

    # Auto-generated OpenAI-style id, e.g. "call_0123abcd4567".
    id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex[:12]}")
    # "function" is the only tool type modeled here.
    type: Literal["function"] = "function"
    function: FunctionCall
class ChatMessage(BaseModel):
    """A single message in a chat conversation."""

    role: Literal["system", "user", "assistant", "tool"]
    # May be None, e.g. for assistant messages that only carry tool_calls.
    content: Optional[str] = None
    name: Optional[str] = None
    # Set on role="tool" messages to link a tool result to its request.
    tool_call_id: Optional[str] = None
    # Present on assistant messages that request tool invocations.
    tool_calls: Optional[list[ToolCall]] = None
class ToolFunction(BaseModel):
    """Definition of a callable tool function."""

    name: str
    description: Optional[str] = None
    # JSON-Schema-style description of the function's parameters.
    parameters: dict[str, Any] = Field(default_factory=dict)
class Tool(BaseModel):
    """Tool definition for function calling."""

    # "function" is the only tool type modeled here.
    type: Literal["function"] = "function"
    function: ToolFunction
class RequestMetadata(BaseModel):
    """Additional, all-optional metadata attached to a request."""

    playbook_id: Optional[str] = None
    tenant_id: Optional[str] = None
    user_id: Optional[str] = None
class ChatCompletionRequest(BaseModel):
    """Request body for chat completions.

    Field names, defaults, and value ranges follow the OpenAI chat
    completions API; ``metadata`` is a local extension.
    """

    model: str
    messages: list[ChatMessage]
    stream: bool = False
    # Sampling controls, range-validated per the OpenAI spec.
    temperature: Optional[float] = Field(default=0.7, ge=0, le=2)
    top_p: Optional[float] = Field(default=1.0, ge=0, le=1)
    max_tokens: Optional[int] = Field(default=None, ge=1)
    # Either a single stop string or a list of them.
    stop: Optional[Union[list[str], str]] = None
    presence_penalty: Optional[float] = Field(default=0, ge=-2, le=2)
    frequency_penalty: Optional[float] = Field(default=0, ge=-2, le=2)
    user: Optional[str] = None
    tools: Optional[list[Tool]] = None
    # Either a mode string (e.g. "auto"/"none") or a structured selector dict.
    tool_choice: Optional[Union[str, dict[str, Any]]] = None
    # Local extension -- not part of the OpenAI spec.
    metadata: Optional[RequestMetadata] = None
class ChatChoice(BaseModel):
    """A single choice in a non-streaming response."""

    index: int = 0
    message: ChatMessage
    # None while generation is in progress / not applicable.
    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter"]] = None
class ChatChoiceDelta(BaseModel):
    """Incremental message delta for a streaming response."""

    # Only the fields that changed in this chunk are populated.
    role: Optional[str] = None
    content: Optional[str] = None
    tool_calls: Optional[list[ToolCall]] = None
class StreamChoice(BaseModel):
    """A single choice in a streaming response chunk."""

    index: int = 0
    delta: ChatChoiceDelta
    # Set only on the final chunk of the stream.
    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter"]] = None
class Usage(BaseModel):
    """Token usage statistics for a completion."""

    prompt_tokens: int = 0
    completion_tokens: int = 0
    # Expected to equal prompt_tokens + completion_tokens; not enforced here.
    total_tokens: int = 0
class ChatCompletionResponse(BaseModel):
    """Response body for chat completions (non-streaming)."""

    # Auto-generated OpenAI-style id, e.g. "chatcmpl-0123abcd4567".
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4().hex[:12]}")
    object: Literal["chat.completion"] = "chat.completion"
    # Unix timestamp (seconds) of response creation.
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str
    choices: list[ChatChoice]
    usage: Optional[Usage] = None
class ChatCompletionChunk(BaseModel):
    """A single chunk of a streaming chat completion response."""

    # Same id for every chunk of one completion on the OpenAI API; here each
    # chunk defaults to a fresh id unless the caller supplies one explicitly.
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4().hex[:12]}")
    object: Literal["chat.completion.chunk"] = "chat.completion.chunk"
    # Unix timestamp (seconds) of chunk creation.
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str
    choices: list[StreamChoice]
# --- Model info -------------------------------------------------------------


class ModelInfo(BaseModel):
    """Information about an available model."""

    id: str
    object: Literal["model"] = "model"
    # Unix timestamp (seconds); defaults to "now" when the entry is built.
    created: int = Field(default_factory=lambda: int(time.time()))
    owned_by: str = "breakpilot"
    description: Optional[str] = None
    # Maximum context window in tokens.
    context_length: int = 8192
class ModelListResponse(BaseModel):
    """Response body for the ``/v1/models`` listing endpoint."""

    object: Literal["list"] = "list"
    data: list[ModelInfo]