fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
204
backend/tests/test_llm_gateway/test_models.py
Normal file
204
backend/tests/test_llm_gateway/test_models.py
Normal file
@@ -0,0 +1,204 @@
|
||||
"""
|
||||
Tests für LLM Gateway Pydantic Models.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from llm_gateway.models.chat import (
|
||||
ChatMessage,
|
||||
ChatCompletionRequest,
|
||||
ChatCompletionResponse,
|
||||
ChatCompletionChunk,
|
||||
ChatChoice,
|
||||
StreamChoice,
|
||||
ChatChoiceDelta,
|
||||
Usage,
|
||||
ModelInfo,
|
||||
ModelListResponse,
|
||||
RequestMetadata,
|
||||
)
|
||||
|
||||
|
||||
class TestChatMessage:
    """Tests for the ChatMessage model: role handling and optional fields."""

    def test_user_message(self):
        """A user message stores role and content; `name` defaults to None."""
        msg = ChatMessage(role="user", content="Hello")
        assert msg.role == "user"
        assert msg.content == "Hello"
        assert msg.name is None

    def test_assistant_message(self):
        """An assistant message round-trips role and content unchanged."""
        msg = ChatMessage(role="assistant", content="Hi there!")
        assert msg.role == "assistant"
        assert msg.content == "Hi there!"

    def test_system_message(self):
        """The 'system' role is accepted for instruction messages."""
        msg = ChatMessage(role="system", content="You are a helpful assistant.")
        assert msg.role == "system"

    def test_tool_message(self):
        """A tool message carries a tool_call_id linking it to the originating call."""
        msg = ChatMessage(role="tool", content='{"result": "success"}', tool_call_id="call_123")
        assert msg.role == "tool"
        assert msg.tool_call_id == "call_123"
||||
class TestChatCompletionRequest:
    """Tests for the ChatCompletionRequest model: defaults and validation."""

    def test_minimal_request(self):
        """A request with only model + messages gets the documented defaults
        (stream=False, temperature=0.7)."""
        req = ChatCompletionRequest(
            model="breakpilot-teacher-8b",
            messages=[ChatMessage(role="user", content="Hello")],
        )
        assert req.model == "breakpilot-teacher-8b"
        assert len(req.messages) == 1
        assert req.stream is False
        assert req.temperature == 0.7

    def test_full_request(self):
        """All optional fields (stream, temperature, max_tokens, metadata)
        are stored as given."""
        req = ChatCompletionRequest(
            model="breakpilot-teacher-70b",
            messages=[
                ChatMessage(role="system", content="Du bist ein Assistent."),
                ChatMessage(role="user", content="Schreibe einen Brief."),
            ],
            stream=True,
            temperature=0.5,
            max_tokens=1000,
            metadata=RequestMetadata(playbook_id="pb_elternbrief"),
        )
        assert req.stream is True
        assert req.temperature == 0.5
        assert req.max_tokens == 1000
        assert req.metadata.playbook_id == "pb_elternbrief"

    def test_temperature_bounds(self):
        """Temperature accepts the inclusive bounds 0.0 and 2.0 and rejects
        values above 2.0 with a validation error."""
        # Valid boundary values.
        req = ChatCompletionRequest(
            model="test",
            messages=[ChatMessage(role="user", content="test")],
            temperature=0.0,
        )
        assert req.temperature == 0.0

        req = ChatCompletionRequest(
            model="test",
            messages=[ChatMessage(role="user", content="test")],
            temperature=2.0,
        )
        assert req.temperature == 2.0

        # Out-of-range value: pydantic raises a ValidationError,
        # which is a ValueError subclass.
        with pytest.raises(ValueError):
            ChatCompletionRequest(
                model="test",
                messages=[ChatMessage(role="user", content="test")],
                temperature=2.5,
            )
||||
class TestChatCompletionResponse:
    """Tests for the ChatCompletionResponse model (non-streaming replies)."""

    def test_response_creation(self):
        """A response stores choices and usage, with the fixed object tag
        'chat.completion'."""
        response = ChatCompletionResponse(
            model="breakpilot-teacher-8b",
            choices=[
                ChatChoice(
                    index=0,
                    message=ChatMessage(role="assistant", content="Hello!"),
                    finish_reason="stop",
                )
            ],
            usage=Usage(prompt_tokens=10, completion_tokens=5, total_tokens=15),
        )
        assert response.object == "chat.completion"
        assert response.model == "breakpilot-teacher-8b"
        assert len(response.choices) == 1
        assert response.choices[0].message.content == "Hello!"
        assert response.usage.total_tokens == 15

    def test_response_has_id(self):
        """Every response auto-generates an id with the 'chatcmpl-' prefix."""
        response = ChatCompletionResponse(
            model="test",
            choices=[
                ChatChoice(
                    message=ChatMessage(role="assistant", content="test"),
                )
            ],
        )
        assert response.id.startswith("chatcmpl-")
        assert len(response.id) > 10
||||
class TestChatCompletionChunk:
    """Tests for streaming chunks (ChatCompletionChunk / StreamChoice)."""

    def test_chunk_creation(self):
        """An intermediate chunk carries a content delta and the fixed
        object tag 'chat.completion.chunk'."""
        chunk = ChatCompletionChunk(
            model="breakpilot-teacher-8b",
            choices=[
                StreamChoice(
                    index=0,
                    delta=ChatChoiceDelta(content="Hello"),
                    finish_reason=None,
                )
            ],
        )
        assert chunk.object == "chat.completion.chunk"
        assert chunk.choices[0].delta.content == "Hello"

    def test_final_chunk(self):
        """The terminating chunk has an empty delta and finish_reason='stop'."""
        chunk = ChatCompletionChunk(
            model="test",
            choices=[
                StreamChoice(
                    index=0,
                    delta=ChatChoiceDelta(),
                    finish_reason="stop",
                )
            ],
        )
        assert chunk.choices[0].finish_reason == "stop"
||||
class TestModelInfo:
    """Tests for the ModelInfo model."""

    def test_model_info(self):
        """ModelInfo stores id/owner/description/context_length and uses the
        fixed object tag 'model'."""
        model = ModelInfo(
            id="breakpilot-teacher-8b",
            owned_by="breakpilot",
            description="Test model",
            context_length=8192,
        )
        assert model.id == "breakpilot-teacher-8b"
        assert model.object == "model"
        assert model.context_length == 8192
||||
class TestModelListResponse:
    """Tests for the ModelListResponse model."""

    def test_model_list(self):
        """A model list wraps its entries in `data` with the fixed
        object tag 'list'."""
        response = ModelListResponse(
            data=[
                ModelInfo(id="model-1", owned_by="test"),
                ModelInfo(id="model-2", owned_by="test"),
            ]
        )
        assert response.object == "list"
        assert len(response.data) == 2
||||
Reference in New Issue
Block a user