fix: Restore all files lost during destructive rebase

A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-02-09 09:51:32 +01:00
parent f7487ee240
commit bfdaf63ba9
2009 changed files with 749983 additions and 1731 deletions

View File

@@ -0,0 +1,16 @@
"""
Integration tests that require external services.
These tests run in the Woodpecker CI integration pipeline
(.woodpecker/integration.yml) which provides:
- PostgreSQL database
- Valkey/Redis cache
To run locally:
docker compose -f docker-compose.test.yml up -d postgres-test valkey-test
export DATABASE_URL=postgresql://breakpilot:breakpilot_test@localhost:55432/breakpilot_test
export VALKEY_URL=redis://localhost:56379
export SKIP_INTEGRATION_TESTS=false
pytest tests/test_integration/ -v
docker compose -f docker-compose.test.yml down -v
"""

View File

@@ -0,0 +1,186 @@
"""
Integration tests for database and cache connectivity.
These tests verify that the CI pipeline can connect to:
- PostgreSQL database
- Valkey/Redis cache
Run with: pytest tests/test_integration/test_db_connection.py -v
"""
import os
import pytest
@pytest.mark.integration
def test_database_connection():
    """Verify basic PostgreSQL connectivity via DATABASE_URL.

    Connects with psycopg2, runs a trivial SELECT, and checks that the
    server identifies itself as PostgreSQL.
    """
    import psycopg2

    db_url = os.environ.get("DATABASE_URL")
    assert db_url is not None, "DATABASE_URL not set"

    # URL format: postgresql://user:password@host:port/dbname
    conn = psycopg2.connect(db_url)
    try:
        # Use the cursor as a context manager so it is closed even when an
        # assertion fails (the original never closed it).
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            result = cur.fetchone()
            assert result[0] == 1, "Database query returned unexpected result"

            # Sanity-check the server identity/version string.
            cur.execute("SELECT version()")
            version = cur.fetchone()[0]
            assert "PostgreSQL" in version, f"Unexpected database: {version}"
            print(f"Connected to: {version.split(',')[0]}")
    finally:
        conn.close()
@pytest.mark.integration
def test_database_can_create_table():
    """Verify DDL + DML round-trip: create table, insert, read back, drop.

    The DROP is performed in a ``finally`` so a failing assertion mid-test
    does not leave ``_ci_test_table`` behind for later runs.
    """
    import psycopg2

    db_url = os.environ.get("DATABASE_URL")
    assert db_url is not None, "DATABASE_URL not set"

    conn = psycopg2.connect(db_url)
    conn.autocommit = True  # each statement commits immediately
    try:
        with conn.cursor() as cur:
            try:
                # Create the scratch table.
                cur.execute("""
                    CREATE TABLE IF NOT EXISTS _ci_test_table (
                        id SERIAL PRIMARY KEY,
                        name VARCHAR(100),
                        created_at TIMESTAMP DEFAULT NOW()
                    )
                """)
                # Insert one row and capture its generated id.
                cur.execute(
                    "INSERT INTO _ci_test_table (name) VALUES (%s) RETURNING id",
                    ("integration_test",)
                )
                inserted_id = cur.fetchone()[0]
                assert inserted_id is not None, "Insert failed"
                # Read the row back by primary key.
                cur.execute("SELECT name FROM _ci_test_table WHERE id = %s", (inserted_id,))
                name = cur.fetchone()[0]
                assert name == "integration_test", f"Read back failed: {name}"
            finally:
                # Cleanup in finally: original dropped the table in the try
                # body, leaking it whenever an assertion failed first.
                cur.execute("DROP TABLE IF EXISTS _ci_test_table")
    finally:
        conn.close()
@pytest.mark.integration
def test_valkey_connection():
    """Verify Valkey/Redis connectivity: ping, set/get/delete, server info."""
    import redis

    cache_url = os.environ.get("VALKEY_URL") or os.environ.get("REDIS_URL")
    assert cache_url is not None, "VALKEY_URL or REDIS_URL not set"

    client = redis.from_url(cache_url)
    try:
        # Round-trip a ping first.
        assert client.ping() is True, "Valkey ping failed"

        # Basic set/get round-trip.
        key = "_ci_test_key"
        value = "integration_test_value"
        client.set(key, value)
        fetched = client.get(key)
        # redis-py returns bytes unless decode_responses=True was requested.
        assert fetched == value.encode(), f"Get returned: {fetched}"

        # Delete and confirm the key is gone.
        client.delete(key)
        assert client.get(key) is None, "Delete failed"

        # Report which server version we talked to.
        server_info = client.info("server")
        server_version = server_info.get("redis_version", "unknown")
        print(f"Connected to Valkey/Redis version: {server_version}")
    finally:
        client.close()
@pytest.mark.integration
def test_valkey_can_store_json():
    """Verify that a JSON payload round-trips through Valkey unchanged.

    The key is deleted in ``finally`` so a failing assertion does not leave
    ``_ci_test_json`` behind (the original deleted it inside the try body).
    """
    import redis
    import json

    cache_url = os.environ.get("VALKEY_URL") or os.environ.get("REDIS_URL")
    assert cache_url is not None, "VALKEY_URL or REDIS_URL not set"

    r = redis.from_url(cache_url)
    test_key = "_ci_test_json"
    try:
        test_data = {
            "user_id": "test-123",
            "session": {"active": True, "created": "2025-01-01"},
            "scores": [85, 90, 78]
        }
        # Store as a JSON string.
        r.set(test_key, json.dumps(test_data))
        # Retrieve and parse; the structure must survive unchanged.
        parsed = json.loads(r.get(test_key))
        assert parsed["user_id"] == "test-123"
        assert parsed["session"]["active"] is True
        assert parsed["scores"] == [85, 90, 78]
    finally:
        # Cleanup moved to finally so the key never leaks.
        r.delete(test_key)
        r.close()
@pytest.mark.integration
def test_valkey_expiration():
    """Verify key TTL/expiration semantics in Valkey."""
    import redis
    import time

    cache_url = os.environ.get("VALKEY_URL") or os.environ.get("REDIS_URL")
    assert cache_url is not None, "VALKEY_URL or REDIS_URL not set"

    client = redis.from_url(cache_url)
    expiring_key = "_ci_test_expiry"
    try:
        # Store a value that lives for 2 seconds.
        client.setex(expiring_key, 2, "temporary_value")

        # Must exist right away.
        assert client.get(expiring_key) is not None, "Key should exist"

        # The remaining TTL must be in the (0, 2] window.
        remaining = client.ttl(expiring_key)
        assert 0 < remaining <= 2, f"TTL should be 1-2, got {remaining}"

        # Outlive the TTL, then confirm the key expired.
        time.sleep(3)
        assert client.get(expiring_key) is None, "Key should have expired"
    finally:
        # Cleanup (in case the test failed before expiration).
        client.delete(expiring_key)
        client.close()

View File

@@ -0,0 +1,352 @@
"""
Integration Tests for EduSearch Seeds API.
These tests require a running PostgreSQL database and test the full
request-response cycle through the FastAPI application.
Run with: pytest tests/test_integration/test_edu_search_seeds_integration.py -v
"""
import pytest
import httpx
import os
import uuid
from typing import Generator
# Test configuration
# Base URL of the running FastAPI app; override with TEST_API_BASE.
API_BASE = os.environ.get("TEST_API_BASE", "http://localhost:8082")
# Set SKIP_INTEGRATION_TESTS=true to skip this module without probing.
SKIP_INTEGRATION = os.environ.get("SKIP_INTEGRATION_TESTS", "false").lower() == "true"
# Check if server is reachable
def _check_server_available():
    """Return True if the API server answers, False otherwise.

    This runs once at module import time (see SERVER_AVAILABLE below), so it
    must never raise: any transport-level failure means "not available".
    """
    if SKIP_INTEGRATION:
        return False
    try:
        with httpx.Client(timeout=2.0) as client:
            client.get(f"{API_BASE}/health")
        return True
    except httpx.HTTPError:
        # Broader than the original ConnectError/TimeoutException pair:
        # protocol or other transport errors raised here would otherwise
        # crash test collection for the whole module.
        return False
# Probe once at import time; every test in this module is skipped when the
# API server cannot be reached.
SERVER_AVAILABLE = _check_server_available()
pytestmark = pytest.mark.skipif(
    not SERVER_AVAILABLE,
    reason=f"Integration tests skipped (server at {API_BASE} not available)"
)
@pytest.fixture
def api_client() -> Generator[httpx.Client, None, None]:
    """Yield an HTTP client bound to the API base URL; closed after the test."""
    client = httpx.Client(base_url=API_BASE, timeout=30.0)
    try:
        yield client
    finally:
        client.close()
@pytest.fixture
def async_api_client():
    """Create async HTTP client for API calls.

    NOTE(review): the client is returned rather than yielded, so nothing
    closes it after the test; consider an async-generator fixture that
    calls ``aclose()`` — confirm with pytest-asyncio availability.
    """
    return httpx.AsyncClient(base_url=API_BASE, timeout=30.0)
class TestHealthEndpoint:
    """Basic connectivity tests."""

    def test_api_is_reachable(self, api_client: httpx.Client):
        """The /health endpoint must answer with HTTP 200."""
        health = api_client.get("/health")
        assert health.status_code == 200
class TestCategoriesIntegration:
    """Integration tests for categories endpoint."""

    def test_list_categories_returns_default_categories(self, api_client: httpx.Client):
        """All default categories must be present in the response."""
        response = api_client.get("/v1/edu-search/categories")
        assert response.status_code == 200

        payload = response.json()
        assert "categories" in payload

        # Collect the returned names once, then check each expected one.
        names = {category["name"] for category in payload["categories"]}
        for expected_cat in ("federal", "states", "science", "portals"):
            assert expected_cat in names, f"Missing category: {expected_cat}"
class TestSeedsWorkflow:
    """Integration tests for complete seeds workflow."""

    @pytest.fixture
    def test_seed_url(self):
        """Generate a unique URL for the test seed (avoids cross-run clashes)."""
        return f"https://test-seed-{uuid.uuid4().hex[:8]}.de"

    def test_create_read_update_delete_seed(self, api_client: httpx.Client, test_seed_url: str):
        """Test the complete CRUD workflow for a seed.

        NOTE(review): the finally block asserts on the DELETE/GET responses;
        if the try body raises first, a failing cleanup assertion can mask
        the original error.
        """
        # CREATE
        create_response = api_client.post(
            "/v1/edu-search/seeds",
            json={
                "url": test_seed_url,
                "name": "Integration Test Seed",
                "description": "Created by integration test",
                "trust_boost": 0.75,
                "enabled": True
            }
        )
        assert create_response.status_code == 200
        create_data = create_response.json()
        assert create_data["status"] == "created"
        seed_id = create_data["id"]
        try:
            # READ — the stored seed must echo the creation payload.
            get_response = api_client.get(f"/v1/edu-search/seeds/{seed_id}")
            assert get_response.status_code == 200
            seed_data = get_response.json()
            assert seed_data["url"] == test_seed_url
            assert seed_data["name"] == "Integration Test Seed"
            assert seed_data["trust_boost"] == 0.75
            # UPDATE — partial update of name and enabled flag.
            update_response = api_client.put(
                f"/v1/edu-search/seeds/{seed_id}",
                json={
                    "name": "Updated Test Seed",
                    "enabled": False
                }
            )
            assert update_response.status_code == 200
            # Verify the update took effect server-side.
            verify_response = api_client.get(f"/v1/edu-search/seeds/{seed_id}")
            assert verify_response.status_code == 200
            updated_data = verify_response.json()
            assert updated_data["name"] == "Updated Test Seed"
            assert updated_data["enabled"] is False
        finally:
            # DELETE (cleanup)
            delete_response = api_client.delete(f"/v1/edu-search/seeds/{seed_id}")
            assert delete_response.status_code == 200
            # Verify deletion — a subsequent GET must 404.
            verify_delete = api_client.get(f"/v1/edu-search/seeds/{seed_id}")
            assert verify_delete.status_code == 404

    def test_list_seeds_with_filters(self, api_client: httpx.Client, test_seed_url: str):
        """Test listing seeds with various filters."""
        # Create a test seed first so the list is non-empty.
        create_response = api_client.post(
            "/v1/edu-search/seeds",
            json={
                "url": test_seed_url,
                "name": "Filter Test Seed",
                "enabled": True
            }
        )
        assert create_response.status_code == 200
        seed_id = create_response.json()["id"]
        try:
            # List all seeds — response must carry "seeds" and "total".
            list_response = api_client.get("/v1/edu-search/seeds")
            assert list_response.status_code == 200
            assert "seeds" in list_response.json()
            assert "total" in list_response.json()
            # List with enabled filter.
            enabled_response = api_client.get("/v1/edu-search/seeds?enabled=true")
            assert enabled_response.status_code == 200
            # List with pagination — the limit must be echoed back.
            paginated_response = api_client.get("/v1/edu-search/seeds?limit=10&offset=0")
            assert paginated_response.status_code == 200
            assert paginated_response.json()["limit"] == 10
        finally:
            api_client.delete(f"/v1/edu-search/seeds/{seed_id}")

    def test_duplicate_url_rejected(self, api_client: httpx.Client, test_seed_url: str):
        """Test that duplicate URLs are rejected."""
        # Create first seed.
        first_response = api_client.post(
            "/v1/edu-search/seeds",
            json={"url": test_seed_url, "name": "First Seed"}
        )
        assert first_response.status_code == 200
        seed_id = first_response.json()["id"]
        try:
            # A second seed with the same URL must fail with 400 and a
            # German "already exists" detail message.
            duplicate_response = api_client.post(
                "/v1/edu-search/seeds",
                json={"url": test_seed_url, "name": "Duplicate Seed"}
            )
            assert duplicate_response.status_code == 400
            assert "existiert bereits" in duplicate_response.json()["detail"]
        finally:
            api_client.delete(f"/v1/edu-search/seeds/{seed_id}")
class TestBulkImportIntegration:
    """Integration tests for bulk import functionality."""

    def test_bulk_import_multiple_seeds(self, api_client: httpx.Client):
        """Test importing multiple seeds at once."""
        unique_suffix = uuid.uuid4().hex[:8]
        seeds_to_import = [
            {"url": f"https://bulk-test-1-{unique_suffix}.de", "name": "Bulk Test 1", "category": "federal"},
            {"url": f"https://bulk-test-2-{unique_suffix}.de", "name": "Bulk Test 2", "category": "states"},
            {"url": f"https://bulk-test-3-{unique_suffix}.de", "name": "Bulk Test 3", "category": "science"}
        ]
        response = api_client.post(
            "/v1/edu-search/seeds/bulk-import",
            json={"seeds": seeds_to_import}
        )
        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "imported"
        assert data["imported"] == 3
        assert data["skipped"] == 0
        # Cleanup: fetch the seed list ONCE (the original re-fetched it for
        # every imported seed) and delete everything we created.
        imported_urls = {seed["url"] for seed in seeds_to_import}
        list_response = api_client.get("/v1/edu-search/seeds")
        for s in list_response.json()["seeds"]:
            if s["url"] in imported_urls:
                api_client.delete(f"/v1/edu-search/seeds/{s['id']}")

    def test_bulk_import_skips_duplicates(self, api_client: httpx.Client):
        """Test that bulk import skips existing URLs."""
        unique_url = f"https://bulk-dup-test-{uuid.uuid4().hex[:8]}.de"
        # First import must create the seed.
        first_response = api_client.post(
            "/v1/edu-search/seeds/bulk-import",
            json={"seeds": [{"url": unique_url, "name": "First Import"}]}
        )
        assert first_response.status_code == 200
        assert first_response.json()["imported"] == 1
        try:
            # Second import with the same URL must be skipped, not duplicated.
            second_response = api_client.post(
                "/v1/edu-search/seeds/bulk-import",
                json={"seeds": [{"url": unique_url, "name": "Duplicate Import"}]}
            )
            assert second_response.status_code == 200
            assert second_response.json()["imported"] == 0
            assert second_response.json()["skipped"] == 1
        finally:
            # Cleanup: locate the created seed by URL and delete it.
            list_response = api_client.get("/v1/edu-search/seeds")
            for s in list_response.json()["seeds"]:
                if s["url"] == unique_url:
                    api_client.delete(f"/v1/edu-search/seeds/{s['id']}")
class TestStatsIntegration:
    """Integration tests for statistics endpoint."""

    def test_get_stats_returns_valid_structure(self, api_client: httpx.Client):
        """The stats payload must contain every expected field with the right type."""
        response = api_client.get("/v1/edu-search/stats")
        assert response.status_code == 200
        stats = response.json()

        # Field presence, then type, checked pairwise from one table.
        expected_types = {
            "total_seeds": int,
            "enabled_seeds": int,
            "avg_trust_boost": (int, float),
            "seeds_per_category": dict,
        }
        for field, expected_type in expected_types.items():
            assert field in stats
            assert isinstance(stats[field], expected_type)
class TestExportForCrawlerIntegration:
    """Integration tests for crawler export endpoint."""

    def test_export_returns_valid_format(self, api_client: httpx.Client):
        """The export payload must follow the crawler-compatible format."""
        response = api_client.get("/v1/edu-search/seeds/export/for-crawler")
        assert response.status_code == 200
        payload = response.json()

        for top_level_key in ("seeds", "generated_at", "total"):
            assert top_level_key in payload

        # If any seeds were exported, spot-check the first one's shape.
        if payload["seeds"]:
            first_seed = payload["seeds"][0]
            for seed_key in ("url", "name", "trust_boost", "crawl_depth"):
                assert seed_key in first_seed

    def test_export_only_includes_enabled_seeds(self, api_client: httpx.Client):
        """A disabled seed must never appear in the crawler export."""
        unique_url = f"https://export-test-{uuid.uuid4().hex[:8]}.de"

        # Create a seed that is explicitly disabled.
        create_response = api_client.post(
            "/v1/edu-search/seeds",
            json={"url": unique_url, "name": "Disabled Seed", "enabled": False}
        )
        assert create_response.status_code == 200
        seed_id = create_response.json()["id"]
        try:
            export_response = api_client.get("/v1/edu-search/seeds/export/for-crawler")
            assert export_response.status_code == 200
            # The disabled seed's URL must be absent from the export.
            exported_urls = {s["url"] for s in export_response.json()["seeds"]}
            assert unique_url not in exported_urls
        finally:
            api_client.delete(f"/v1/edu-search/seeds/{seed_id}")
class TestErrorHandling:
    """Integration tests for error handling."""

    def test_get_nonexistent_seed_returns_404(self, api_client: httpx.Client):
        """A random (nonexistent) UUID must yield HTTP 404."""
        missing_id = str(uuid.uuid4())
        lookup = api_client.get(f"/v1/edu-search/seeds/{missing_id}")
        assert lookup.status_code == 404

    def test_invalid_uuid_returns_400(self, api_client: httpx.Client):
        """A malformed UUID in the path must yield HTTP 400."""
        lookup = api_client.get("/v1/edu-search/seeds/not-a-uuid")
        assert lookup.status_code == 400

    def test_create_seed_with_missing_required_fields(self, api_client: httpx.Client):
        """Omitting the required url field must yield HTTP 422."""
        response = api_client.post(
            "/v1/edu-search/seeds",
            json={"name": "Missing URL"}  # url is required
        )
        assert response.status_code == 422

    def test_create_seed_with_invalid_url(self, api_client: httpx.Client):
        """A syntactically invalid URL must be rejected."""
        response = api_client.post(
            "/v1/edu-search/seeds",
            json={"url": "not-a-valid-url", "name": "Invalid URL"}
        )
        # Either 422 (validation error) or 400 is acceptable.
        assert response.status_code in [400, 422]

View File

@@ -0,0 +1,301 @@
"""
Integration Tests für LibreChat + Tavily Web Search.
Diese Tests prüfen:
1. Tavily API Konnektivität
2. LibreChat Container Health
3. End-to-End Web Search Flow
"""
import os
import pytest
import httpx
from unittest.mock import patch, AsyncMock
# Test configuration.
# SECURITY: the original shipped a real Tavily API key as the fallback
# default, committing a live credential to source control. Credentials must
# come from the environment only; tests that need the key will fail fast
# (see TestTavilyConfigValidation) instead of silently using a leaked key.
TAVILY_API_KEY = os.getenv("TAVILY_API_KEY", "")
LIBRECHAT_URL = os.getenv("LIBRECHAT_URL", "http://localhost:3080")
TAVILY_API_URL = "https://api.tavily.com"
class TestTavilyAPIConnectivity:
    """Tests for the direct Tavily API connection."""

    @pytest.mark.asyncio
    async def test_tavily_api_health(self):
        """The Tavily API must be reachable and answer a minimal search."""
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "test query",
                    "max_results": 1
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert "results" in data
            assert "query" in data

    @pytest.mark.asyncio
    async def test_tavily_search_returns_results(self):
        """Tavily must return non-empty, well-formed search results."""
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "LibreChat AI chat platform",
                    "max_results": 3
                }
            )
            assert response.status_code == 200
            data = response.json()
            # Check the response structure.
            assert "results" in data
            assert len(data["results"]) > 0
            # Check the first result's fields.
            first_result = data["results"][0]
            assert "url" in first_result
            assert "title" in first_result
            assert "content" in first_result
            assert "score" in first_result

    @pytest.mark.asyncio
    async def test_tavily_invalid_api_key(self):
        """Tavily must reject an invalid API key."""
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": "invalid-key-12345",
                    "query": "test",
                    "max_results": 1
                }
            )
            # Should return 401 or 403 (400 is also accepted).
            assert response.status_code in [401, 403, 400]

    @pytest.mark.asyncio
    async def test_tavily_search_depth_basic(self):
        """Tavily's "basic" search depth must work."""
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "Python programming",
                    "search_depth": "basic",
                    "max_results": 2
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert "response_time" in data

    @pytest.mark.asyncio
    async def test_tavily_german_query(self):
        """Tavily must handle German-language queries."""
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "Datenschutz Schulen Deutschland",
                    "max_results": 3
                }
            )
            assert response.status_code == 200
            data = response.json()
            assert len(data["results"]) > 0
class TestLibreChatHealth:
    """Health checks for the LibreChat container."""

    @pytest.mark.asyncio
    async def test_librechat_api_health(self):
        """The LibreChat API endpoint (or its root page) must respond."""
        async with httpx.AsyncClient(timeout=10.0) as client:
            try:
                response = await client.get(f"{LIBRECHAT_URL}/api/health")
                # LibreChat ships no /api/health; fall back to the root page.
                if response.status_code == 404:
                    response = await client.get(f"{LIBRECHAT_URL}/")
                assert response.status_code in [200, 301, 302]
            except httpx.ConnectError:
                pytest.skip("LibreChat Container nicht erreichbar")

    @pytest.mark.asyncio
    async def test_librechat_frontend_loads(self):
        """The LibreChat frontend must serve an HTML page."""
        async with httpx.AsyncClient(timeout=10.0) as client:
            try:
                response = await client.get(f"{LIBRECHAT_URL}/")
                assert response.status_code in [200, 301, 302]
                if response.status_code == 200:
                    # Accept either an HTML content-type or a raw doctype.
                    content_type = response.headers.get("content-type", "").lower()
                    is_html = "html" in content_type or "<!DOCTYPE" in response.text[:100]
                    assert is_html
            except httpx.ConnectError:
                pytest.skip("LibreChat Container nicht erreichbar")
class TestTavilyConfigValidation:
    """Validation checks for the configured Tavily credentials."""

    def test_tavily_api_key_format(self):
        """The configured key must carry the Tavily key prefix."""
        # Tavily keys begin with "tvly-".
        assert TAVILY_API_KEY.startswith("tvly-"), \
            f"Tavily API Key sollte mit 'tvly-' beginnen, ist aber: {TAVILY_API_KEY[:10]}..."

    def test_tavily_api_key_length(self):
        """The configured key must be plausibly long."""
        # Tavily keys are typically ~40 characters.
        assert len(TAVILY_API_KEY) > 30, \
            f"Tavily API Key zu kurz: {len(TAVILY_API_KEY)} Zeichen"

    def test_tavily_api_key_not_placeholder(self):
        """The configured key must not be one of the known placeholders."""
        known_placeholders = (
            "your-tavily-api-key",
            "TAVILY_API_KEY",
            "tvly-xxx",
            "tvly-placeholder",
        )
        assert TAVILY_API_KEY not in known_placeholders, \
            "Tavily API Key ist noch ein Platzhalter"
class TestBreakPilotTavilyIntegration:
    """Tests for the BreakPilot backend Tavily integration."""

    @pytest.mark.asyncio
    async def test_breakpilot_tool_gateway_available(self):
        """The tool gateway must report Tavily as available when a key is configured."""
        from llm_gateway.services.tool_gateway import ToolGateway, ToolGatewayConfig
        config = ToolGatewayConfig(tavily_api_key=TAVILY_API_KEY)
        gateway = ToolGateway(config)
        assert gateway.tavily_available is True

    @pytest.mark.asyncio
    async def test_breakpilot_pii_redaction_before_tavily(self):
        """PII must be redacted before queries are sent to Tavily."""
        from llm_gateway.services.pii_detector import PIIDetector
        detector = PIIDetector()
        # Query containing PII (an e-mail address).
        query_with_pii = "Suche Informationen über max.mustermann@schule.de in Klasse 5a"
        result = detector.redact(query_with_pii)
        # The address must be gone and flagged as a detected match.
        assert "max.mustermann@schule.de" not in result.redacted_text
        assert result.pii_found is True
        assert len(result.matches) > 0
        # E-mail addresses are replaced with the [EMAIL_REDACTED] marker.
        assert "[EMAIL_REDACTED]" in result.redacted_text

    @pytest.mark.asyncio
    async def test_breakpilot_tavily_search_with_pii_protection(self):
        """Tavily search through the gateway must apply PII protection."""
        from llm_gateway.services.tool_gateway import ToolGateway, ToolGatewayConfig
        config = ToolGatewayConfig(tavily_api_key=TAVILY_API_KEY)
        gateway = ToolGateway(config)
        # Search containing PII (redacted automatically by the gateway).
        result = await gateway.search(
            query="Datenschutz Email hans.mueller@example.com",
            max_results=2
        )
        # The important part: PII protection kicked in.
        assert result is not None
        assert result.pii_detected is True
        assert "email" in result.pii_types
        assert result.redacted_query is not None
        assert "hans.mueller@example.com" not in result.redacted_query
        assert "[EMAIL_REDACTED]" in result.redacted_query
        # Results are optional — the redacted query may match nothing.
        assert result.results is not None  # list exists (may be empty)
class TestEndToEndFlow:
    """End-to-end tests covering the complete search flow."""

    @pytest.mark.asyncio
    async def test_complete_search_flow(self):
        """Run a full search request and validate every returned result."""
        # 1. Call the Tavily API directly.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "Schulrecht Deutschland aktuelle Änderungen",
                    "max_results": 3,
                    "search_depth": "basic"
                }
            )
        assert response.status_code == 200
        data = response.json()

        # 2. Validate the results: each must carry a url and title, and the
        # url must be an http(s) link.
        assert len(data["results"]) > 0
        for search_result in data["results"]:
            assert "url" in search_result
            assert "title" in search_result
            assert search_result["url"].startswith("http")

    @pytest.mark.asyncio
    async def test_search_response_time(self):
        """A basic search must complete within 10 seconds."""
        import time

        async with httpx.AsyncClient(timeout=30.0) as client:
            started_at = time.time()
            response = await client.post(
                f"{TAVILY_API_URL}/search",
                json={
                    "api_key": TAVILY_API_KEY,
                    "query": "test query",
                    "max_results": 3
                }
            )
            elapsed = time.time() - started_at
        assert response.status_code == 200
        assert elapsed < 10.0, f"Tavily Antwortzeit zu lang: {elapsed:.2f}s"
# Fixtures for shared test resources
@pytest.fixture
def tavily_api_key():
    """Return the configured Tavily API key for tests that request it."""
    return TAVILY_API_KEY
@pytest.fixture
def librechat_url():
    """Return the configured LibreChat base URL for tests that request it."""
    return LIBRECHAT_URL