"""
Context Management for Breakpilot Agents

Manages conversation contexts for multiple sessions with persistence.
"""
import json
import logging
from typing import Any, Awaitable, Callable, Dict, List, Optional

from agent_core.brain.context_models import (
    MessageRole,
    Message,
    ConversationContext,
)

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
class ContextManager:
    """
    Manages conversation contexts for multiple sessions.

    Provides:
    - Context creation and retrieval
    - Persistence to Valkey/PostgreSQL
    - Context sharing between agents
    """

    def __init__(
        self,
        redis_client=None,
        db_pool=None,
        namespace: str = "breakpilot"
    ):
        """
        Initialize the context manager.

        Args:
            redis_client: Async Redis/Valkey client. Optional; without it,
                contexts live only in process memory.
            db_pool: Async PostgreSQL connection pool.
                NOTE(review): not used by any method in this class —
                presumably reserved for durable persistence; confirm.
            namespace: Key namespace used to prefix all Valkey keys.
        """
        self.redis = redis_client
        self.db_pool = db_pool
        self.namespace = namespace
        # In-process cache of live contexts, keyed by session ID.
        self._contexts: Dict[str, "ConversationContext"] = {}
        # Optional async hook for LLM-based summarization,
        # installed via set_summarize_callback().
        self._summarize_callback: Optional[Callable[[List["Message"]], Awaitable[str]]] = None

    def _redis_key(self, session_id: str) -> str:
        """Build the namespaced Valkey key under which a context is stored."""
        return f"{self.namespace}:context:{session_id}"

    def create_context(
        self,
        session_id: str,
        system_prompt: Optional[str] = None,
        max_messages: int = 50
    ) -> "ConversationContext":
        """
        Creates a new conversation context.

        The new context is only registered in the in-process cache;
        call save_context() to persist it to Valkey.

        Args:
            session_id: Session ID for this context
            system_prompt: Optional system prompt
            max_messages: Maximum messages before compression

        Returns:
            The created context
        """
        context = ConversationContext(
            max_messages=max_messages,
            system_prompt=system_prompt
        )
        self._contexts[session_id] = context
        return context

    async def get_context(self, session_id: str) -> Optional["ConversationContext"]:
        """
        Gets a context by session ID.

        Checks the in-process cache first, then falls back to Valkey;
        a Valkey hit is promoted into the local cache.

        Args:
            session_id: The session ID

        Returns:
            ConversationContext or None if the session is unknown
        """
        # Check local cache
        if session_id in self._contexts:
            return self._contexts[session_id]

        # Try Valkey
        context = await self._get_from_valkey(session_id)
        if context:
            self._contexts[session_id] = context
            return context

        return None

    async def save_context(self, session_id: str) -> None:
        """
        Saves a context to persistent storage (Valkey).

        A no-op when the session is not in the local cache.

        Args:
            session_id: The session ID
        """
        # Single lookup instead of membership test + subscript.
        context = self._contexts.get(session_id)
        if context is None:
            return

        await self._cache_in_valkey(session_id, context)

    async def delete_context(self, session_id: str) -> bool:
        """
        Deletes a context from the local cache and from Valkey.

        Args:
            session_id: The session ID

        Returns:
            Always True (deletion is idempotent; a missing session
            is not an error)
        """
        self._contexts.pop(session_id, None)

        if self.redis:
            await self.redis.delete(self._redis_key(session_id))

        return True

    def set_summarize_callback(
        self,
        callback: Callable[[List["Message"]], Awaitable[str]]
    ) -> None:
        """
        Sets a callback for LLM-based summarization.

        Args:
            callback: Async function that takes messages and returns summary
        """
        self._summarize_callback = callback

    async def add_message(
        self,
        session_id: str,
        role: "MessageRole",
        content: str,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Optional["Message"]:
        """
        Adds a message to a session's context.

        Args:
            session_id: The session ID
            role: Message role
            content: Message content
            metadata: Optional metadata

        Returns:
            The created message or None if context not found
        """
        context = await self.get_context(session_id)
        if not context:
            return None

        message = context.add_message(role, content, metadata)

        # Persist eagerly so a crash loses at most the in-flight message.
        await self.save_context(session_id)

        return message

    async def get_messages_for_llm(
        self,
        session_id: str
    ) -> Optional[List[Dict[str, str]]]:
        """
        Gets formatted messages for LLM API call.

        Args:
            session_id: The session ID

        Returns:
            List of message dicts or None if context not found
        """
        context = await self.get_context(session_id)
        if not context:
            return None

        return context.get_messages_for_llm()

    async def _cache_in_valkey(
        self,
        session_id: str,
        context: "ConversationContext"
    ) -> None:
        """Caches the serialized context in Valkey with a 24-hour TTL."""
        if not self.redis:
            return

        try:
            await self.redis.setex(
                self._redis_key(session_id),
                86400,  # 24 hour TTL for contexts
                json.dumps(context.to_dict())
            )
        except Exception as e:
            # Best-effort cache: a Valkey failure must not break the
            # request path. Lazy %-args avoid f-string work when the
            # warning level is disabled.
            logger.warning("Failed to cache context in Valkey: %s", e)

    async def _get_from_valkey(
        self,
        session_id: str
    ) -> Optional["ConversationContext"]:
        """Retrieves and deserializes a context from Valkey; None on miss or error."""
        if not self.redis:
            return None

        try:
            data = await self.redis.get(self._redis_key(session_id))
            if data:
                return ConversationContext.from_dict(json.loads(data))
        except Exception as e:
            # Treat deserialization/connection failures as a cache miss.
            logger.warning("Failed to get context from Valkey: %s", e)

        return None