Initial commit: breakpilot-compliance - Compliance SDK Platform
Services: Admin-Compliance, Backend-Compliance, AI-Compliance-SDK, Consent-SDK, Developer-Portal, PCA-Platform, DSMS
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
breakpilot-compliance-sdk/services/rag-service/rag/assistant.py (new file, 139 lines)
@@ -0,0 +1,139 @@
"""
Assistant Service for RAG

Handles Q&A using LLM with retrieved context.
"""

import httpx
from typing import List, Optional, Dict, Any
import structlog

from .search import SearchService

logger = structlog.get_logger()

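# The system prompt (in German) instructs the model to act as an expert in data
# protection and compliance law: answer only from the supplied legal texts, always
# cite the relevant articles and paragraphs, respond in German, and state clearly
# when it is unsure.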
SYSTEM_PROMPT = """Du bist ein Experte für Datenschutz- und Compliance-Recht.
|
||||
Beantworte Fragen basierend auf den bereitgestellten Rechtstexten.
|
||||
Zitiere immer die relevanten Artikel und Paragraphen.
|
||||
Antworte auf Deutsch.
|
||||
Wenn du dir nicht sicher bist, sage das klar.
|
||||
"""
|
||||
|
||||
|
||||
class AssistantService:
|
||||
"""Service for legal Q&A using RAG."""
|
||||
|
||||
def __init__(self, settings):
|
||||
self.settings = settings
|
||||
self.search_service = SearchService(settings)
|
||||
|
||||
async def ask(
|
||||
self,
|
||||
question: str,
|
||||
context: Optional[str] = None,
|
||||
regulation_codes: Optional[List[str]] = None,
|
||||
include_citations: bool = True
|
||||
) -> Dict[str, Any]:
|
||||
"""Answer a legal question using RAG."""
|
||||
|
||||
# Search for relevant context
|
||||
search_results = await self.search_service.search(
|
||||
query=question,
|
||||
regulation_codes=regulation_codes,
|
||||
limit=5,
|
||||
min_score=0.6
|
||||
)
|
||||
|
||||
# Build context from search results
|
||||
retrieved_context = "\n\n".join([
|
||||
f"[{r['regulation_code']} Art. {r['article']}]: {r['content']}"
|
||||
for r in search_results
|
||||
])
|
||||
|
||||
# Add user-provided context if any
|
||||
if context:
|
||||
retrieved_context = f"{context}\n\n{retrieved_context}"
|
||||
|
||||
# Build prompt
|
||||
prompt = f"""Kontext aus Rechtstexten:
|
||||
{retrieved_context}
|
||||
|
||||
Frage: {question}
|
||||
|
||||
Beantworte die Frage basierend auf dem Kontext. Zitiere relevante Artikel."""
|
||||
|
||||
# Generate answer
|
||||
answer = await self._generate_response(prompt)
|
||||
|
||||
# Extract citations
|
||||
citations = []
|
||||
if include_citations:
|
||||
for result in search_results:
|
||||
citations.append({
|
||||
"regulation_code": result["regulation_code"],
|
||||
"article": result.get("article", ""),
|
||||
"text": result["content"][:200] + "...",
|
||||
"relevance": result["score"]
|
||||
})
|
||||
|
||||
return {
|
||||
"answer": answer,
|
||||
"citations": citations,
|
||||
"confidence": self._calculate_confidence(search_results)
|
||||
}
|
||||
|
||||
async def _generate_response(self, prompt: str) -> str:
|
||||
"""Generate response using Ollama."""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
f"{self.settings.ollama_url}/api/generate",
|
||||
json={
|
||||
"model": self.settings.llm_model,
|
||||
"prompt": prompt,
|
||||
"system": SYSTEM_PROMPT,
|
||||
"stream": False,
|
||||
"options": {
|
||||
"temperature": 0.3,
|
||||
"top_p": 0.9
|
||||
}
|
||||
},
|
||||
timeout=120.0
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()["response"]
|
||||
except httpx.TimeoutException:
|
||||
logger.error("LLM request timed out")
|
||||
return "Die Anfrage hat zu lange gedauert. Bitte versuchen Sie es erneut."
|
||||
except Exception as e:
|
||||
logger.error("LLM generation failed", error=str(e))
|
||||
# Return fallback response
|
||||
return self._generate_fallback_response(prompt)
|
||||
|
||||
def _generate_fallback_response(self, prompt: str) -> str:
|
||||
"""Generate a fallback response without LLM."""
|
||||
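        # Static German fallback text: it refers the user to the cited articles,
        # recommends consulting the full legal texts or a lawyer for a detailed
        # assessment, and notes that the answer was generated automatically because
        # the LLM service was unavailable.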
return """Basierend auf den verfügbaren Rechtstexten:
|
||||
|
||||
Die relevanten Regelungen finden sich in den zitierten Artikeln.
|
||||
Für eine detaillierte rechtliche Bewertung empfehle ich die Konsultation
|
||||
der vollständigen Gesetzestexte oder eines Rechtsbeistands.
|
||||
|
||||
Hinweis: Dies ist eine automatisch generierte Antwort.
|
||||
Der LLM-Dienst war nicht verfügbar."""
|
||||
|
||||
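    # Confidence heuristic: 0.3 when nothing was retrieved, otherwise the mean
    # relevance score, nudged up when three or more passages matched and capped at 1.0.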
    def _calculate_confidence(self, search_results: List[Dict]) -> float:
        """Calculate confidence score based on search results."""
        if not search_results:
            return 0.3

        # Average relevance score
        avg_score = sum(r["score"] for r in search_results) / len(search_results)

        # Adjust based on number of results
        if len(search_results) >= 3:
            confidence = avg_score * 1.1
        else:
            confidence = avg_score * 0.9

        return min(confidence, 1.0)
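For reference, a minimal usage sketch of the new service. The Settings class, the
import path, the model name, the regulation code "GDPR", and the asyncio entry point
are assumptions for illustration; only the ollama_url and llm_model attributes are
actually read by this module.

import asyncio

from rag.assistant import AssistantService


class Settings:
    # Hypothetical settings object; assumed to expose the attributes used above.
    ollama_url = "http://localhost:11434"
    llm_model = "llama3"


async def main():
    assistant = AssistantService(Settings())
    result = await assistant.ask(
        question="Welche Pflichten ergeben sich aus Art. 30 DSGVO?",
        regulation_codes=["GDPR"],
    )
    print(result["answer"])
    for citation in result["citations"]:
        print(citation["regulation_code"], citation["article"], citation["relevance"])


asyncio.run(main())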