This repository has been archived on 2026-02-15. You can view files and clone it. You cannot open issues or pull requests or push a commit.
Files
BreakPilot Dev 19855efacc
Some checks failed
Tests / Go Tests (push) Has been cancelled
Tests / Python Tests (push) Has been cancelled
Tests / Integration Tests (push) Has been cancelled
Tests / Go Lint (push) Has been cancelled
Tests / Python Lint (push) Has been cancelled
Tests / Security Scan (push) Has been cancelled
Tests / All Checks Passed (push) Has been cancelled
Security Scanning / Secret Scanning (push) Has been cancelled
Security Scanning / Dependency Vulnerability Scan (push) Has been cancelled
Security Scanning / Go Security Scan (push) Has been cancelled
Security Scanning / Python Security Scan (push) Has been cancelled
Security Scanning / Node.js Security Scan (push) Has been cancelled
Security Scanning / Docker Image Security (push) Has been cancelled
Security Scanning / Security Summary (push) Has been cancelled
CI/CD Pipeline / Go Tests (push) Has been cancelled
CI/CD Pipeline / Python Tests (push) Has been cancelled
CI/CD Pipeline / Website Tests (push) Has been cancelled
CI/CD Pipeline / Linting (push) Has been cancelled
CI/CD Pipeline / Security Scan (push) Has been cancelled
CI/CD Pipeline / Docker Build & Push (push) Has been cancelled
CI/CD Pipeline / Integration Tests (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / CI Summary (push) Has been cancelled
ci/woodpecker/manual/build-ci-image Pipeline was successful
ci/woodpecker/manual/main Pipeline failed
feat: BreakPilot PWA - Full codebase (clean push without large binaries)
All services: admin-v2, studio-v2, website, ai-compliance-sdk,
consent-service, klausur-service, voice-service, and infrastructure.
Large PDFs and compiled binaries excluded via .gitignore.
2026-02-11 13:25:58 +01:00

136 lines
4.0 KiB
Python

"""
OpenAI-kompatible Chat Completion Models.
Basiert auf OpenAI API Spezifikation:
https://platform.openai.com/docs/api-reference/chat/create
"""
from __future__ import annotations
from typing import Optional, Literal, Any, Union, List, Dict
from pydantic import BaseModel, Field
import time
import uuid
class FunctionCall(BaseModel):
    """A function invocation requested by the model as part of a tool call.

    Follows the OpenAI tool-calling schema: ``arguments`` is the raw
    JSON-encoded string produced by the model, not a parsed object.
    """
    name: str  # name of the function to invoke
    arguments: str  # JSON-encoded argument object, passed through verbatim
class ToolCall(BaseModel):
    """A single tool call emitted by the model."""
    # Unique call id (e.g. "call_3f2a9c1d4e5b"), generated when not supplied.
    id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex[:12]}")
    # The OpenAI spec currently defines only "function" tool calls.
    type: Literal["function"] = "function"
    function: FunctionCall  # function name plus JSON-string arguments
class ChatMessage(BaseModel):
    """One message in a chat conversation."""
    role: Literal["system", "user", "assistant", "tool"]
    # Content may be None, e.g. on assistant turns that only carry tool calls.
    content: Optional[str] = None
    name: Optional[str] = None  # optional participant name
    # Links a tool-result message back to the originating call id.
    tool_call_id: Optional[str] = None
    # Tool calls requested by the assistant, if any.
    tool_calls: Optional[list[ToolCall]] = None
class ToolFunction(BaseModel):
    """Declaration of a callable tool function."""
    name: str  # function name the model may call
    description: Optional[str] = None  # human-readable purpose, shown to the model
    # JSON-Schema object describing the accepted arguments; empty = no parameters.
    parameters: dict[str, Any] = Field(default_factory=dict)
class Tool(BaseModel):
    """Tool definition for function calling."""
    # Only "function" tools exist in the OpenAI schema.
    type: Literal["function"] = "function"
    function: ToolFunction  # the function's name/description/parameter schema
class RequestMetadata(BaseModel):
    """Extra request metadata (BreakPilot-specific, not part of the OpenAI spec)."""
    playbook_id: Optional[str] = None  # presumably selects a playbook — TODO confirm against caller
    tenant_id: Optional[str] = None  # multi-tenancy identifier
    user_id: Optional[str] = None  # end-user identifier
class ChatCompletionRequest(BaseModel):
    """Request body for POST /v1/chat/completions.

    Mirrors the OpenAI chat-completions request schema; ``metadata`` is a
    BreakPilot-specific extension. Typing normalized to PEP 585 builtin
    generics (``list``/``dict``) for consistency with the rest of the module,
    which already uses them (e.g. ``list[ToolCall]``, ``dict[str, Any]``).
    """
    model: str  # id of the model to run
    messages: list[ChatMessage]  # conversation so far, oldest first
    stream: bool = False  # True -> server streams ChatCompletionChunk events
    # Sampling parameters; bounds match the OpenAI API's documented ranges.
    temperature: Optional[float] = Field(default=0.7, ge=0, le=2)
    top_p: Optional[float] = Field(default=1.0, ge=0, le=1)
    max_tokens: Optional[int] = Field(default=None, ge=1)
    # Either a single stop string or a list of stop strings.
    stop: Optional[Union[list[str], str]] = None
    presence_penalty: Optional[float] = Field(default=0, ge=-2, le=2)
    frequency_penalty: Optional[float] = Field(default=0, ge=-2, le=2)
    user: Optional[str] = None  # opaque end-user id for abuse monitoring
    tools: Optional[list[Tool]] = None  # function-calling tool definitions
    # Either a mode string ("none"/"auto"/...) or an explicit
    # {"type": "function", "function": {...}} selector dict.
    tool_choice: Optional[Union[str, dict[str, Any]]] = None
    metadata: Optional[RequestMetadata] = None  # BreakPilot extension
class ChatChoice(BaseModel):
    """One completion choice in a non-streaming response."""
    index: int = 0  # position of this choice in the response list
    message: ChatMessage  # the generated assistant message
    # Why generation stopped; None while the choice is still incomplete.
    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter"]] = None
class ChatChoiceDelta(BaseModel):
    """Incremental message fragment for a streaming response."""
    role: Optional[str] = None  # sent once, on the first chunk of a choice
    content: Optional[str] = None  # next slice of generated text
    tool_calls: Optional[list[ToolCall]] = None  # incremental tool-call data
class StreamChoice(BaseModel):
    """One choice inside a streaming chunk."""
    index: int = 0  # position of this choice in the response list
    delta: ChatChoiceDelta  # the incremental fragment for this chunk
    # Set on the final chunk of the choice; None before that.
    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter"]] = None
class Usage(BaseModel):
    """Token-usage accounting for a completion."""
    prompt_tokens: int = 0  # tokens consumed by the input messages
    completion_tokens: int = 0  # tokens generated by the model
    total_tokens: int = 0  # per OpenAI convention: prompt + completion
class ChatCompletionResponse(BaseModel):
    """Non-streaming response body for /v1/chat/completions."""
    # Response id (e.g. "chatcmpl-1a2b3c4d5e6f"), generated when not supplied.
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4().hex[:12]}")
    object: Literal["chat.completion"] = "chat.completion"
    # Unix timestamp of creation, defaulting to "now".
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str  # id of the model that produced the response
    choices: list[ChatChoice]  # one or more generated completions
    usage: Optional[Usage] = None  # token accounting, when available
class ChatCompletionChunk(BaseModel):
    """One event in a streaming /v1/chat/completions response."""
    # Chunk id; every chunk of one completion shares the same id upstream,
    # but a fresh one is generated when not supplied.
    id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4().hex[:12]}")
    object: Literal["chat.completion.chunk"] = "chat.completion.chunk"
    # Unix timestamp of creation, defaulting to "now".
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str  # id of the model producing the stream
    choices: list[StreamChoice]  # incremental deltas for each choice
# Model Info
class ModelInfo(BaseModel):
    """Description of one model available through the API."""
    id: str  # model identifier as used in requests
    object: Literal["model"] = "model"
    # Unix timestamp; defaults to "now" rather than an actual release date.
    created: int = Field(default_factory=lambda: int(time.time()))
    owned_by: str = "breakpilot"  # owner string reported to clients
    description: Optional[str] = None  # optional human-readable summary
    context_length: int = 8192  # maximum context window in tokens
class ModelListResponse(BaseModel):
    """Response body for GET /v1/models."""
    object: Literal["list"] = "list"
    data: list[ModelInfo]  # all models exposed by this server