feat: Phase 3 — RAG-Anbindung für alle 18 Dokumenttypen + Vendor Contract Review
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 34s
CI / test-python-backend-compliance (push) Successful in 26s
CI / test-python-document-crawler (push) Successful in 21s
CI / test-python-dsms-gateway (push) Successful in 17s
All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-ai-compliance (push) Successful in 34s
CI / test-python-backend-compliance (push) Successful in 26s
CI / test-python-document-crawler (push) Successful in 21s
CI / test-python-dsms-gateway (push) Successful in 17s
Migrate queryRAG from the klausur-service GET endpoint to the bp-core-rag-service POST endpoint with multi-collection support. Each of the 18 ScopeDocumentTypes now gets a type-specific RAG collection and an optimized search query instead of the generic fallback. Vendor-compliance contract review now uses LLM + RAG for real analysis, with a mock fallback on error. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -4,11 +4,18 @@ import {
|
||||
Finding,
|
||||
CONTRACT_REVIEW_SYSTEM_PROMPT,
|
||||
} from '@/lib/sdk/vendor-compliance'
|
||||
import { queryRAG } from '@/lib/sdk/drafting-engine/rag-query'
|
||||
import { transformAnalysisResponse } from '@/lib/sdk/vendor-compliance/contract-review/analyzer'
|
||||
|
||||
const OLLAMA_URL = process.env.OLLAMA_URL || 'http://host.docker.internal:11434'
|
||||
const LLM_MODEL = process.env.COMPLIANCE_LLM_MODEL || 'qwen2.5vl:32b'
|
||||
|
||||
/**
|
||||
* POST /api/sdk/v1/vendor-compliance/contracts/[id]/review
|
||||
*
|
||||
* Starts the LLM-based contract review process
|
||||
* Starts the LLM-based contract review process.
|
||||
* If documentText is provided, runs LLM analysis with RAG context.
|
||||
* Falls back to mock findings on LLM error or missing documentText.
|
||||
*/
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
@@ -16,15 +23,84 @@ export async function POST(
|
||||
) {
|
||||
try {
|
||||
const { id: contractId } = await params
|
||||
const body = await request.json().catch(() => ({}))
|
||||
const { documentText, vendorId, tenantId } = body as {
|
||||
documentText?: string
|
||||
vendorId?: string
|
||||
tenantId?: string
|
||||
}
|
||||
|
||||
// In production:
|
||||
// 1. Fetch contract from database
|
||||
// 2. Extract text from PDF/DOCX using embedding-service
|
||||
// 3. Send to LLM for analysis
|
||||
// 4. Store findings in database
|
||||
// 5. Update contract with compliance score
|
||||
// If documentText is provided, attempt LLM-based analysis
|
||||
if (documentText) {
|
||||
try {
|
||||
// Fetch RAG context for contract review
|
||||
const ragContext = await queryRAG(
|
||||
'AVV Art. 28 DSGVO Auftragsverarbeitung Vertragsanforderungen',
|
||||
3,
|
||||
'bp_compliance_recht'
|
||||
)
|
||||
|
||||
// For demo, return mock analysis results
|
||||
// Build system prompt with RAG context
|
||||
let systemPrompt = CONTRACT_REVIEW_SYSTEM_PROMPT
|
||||
if (ragContext) {
|
||||
systemPrompt += `\n\nRECHTSKONTEXT (als Referenz):\n${ragContext}`
|
||||
}
|
||||
|
||||
// Call Ollama
|
||||
const ollamaResponse = await fetch(`${OLLAMA_URL}/api/chat`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
model: LLM_MODEL,
|
||||
messages: [
|
||||
{ role: 'system', content: systemPrompt },
|
||||
{ role: 'user', content: `Analysiere den folgenden Vertrag auf DSGVO-Konformitaet:\n\n${documentText}` },
|
||||
],
|
||||
stream: false,
|
||||
options: { temperature: 0.1, num_predict: 16384 },
|
||||
format: 'json',
|
||||
}),
|
||||
signal: AbortSignal.timeout(180000),
|
||||
})
|
||||
|
||||
if (!ollamaResponse.ok) {
|
||||
throw new Error(`LLM nicht erreichbar (Status ${ollamaResponse.status})`)
|
||||
}
|
||||
|
||||
const result = await ollamaResponse.json()
|
||||
const content = result.message?.content || ''
|
||||
const llmResponse = JSON.parse(content)
|
||||
|
||||
// Transform LLM response to typed findings
|
||||
const analysisResult = transformAnalysisResponse(llmResponse, {
|
||||
contractId,
|
||||
vendorId: vendorId || 'unknown',
|
||||
tenantId: tenantId || 'default',
|
||||
documentText,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
contractId,
|
||||
findings: analysisResult.findings,
|
||||
complianceScore: analysisResult.complianceScore,
|
||||
reviewCompletedAt: new Date().toISOString(),
|
||||
topRisks: analysisResult.topRisks,
|
||||
requiredActions: analysisResult.requiredActions,
|
||||
metadata: analysisResult.metadata,
|
||||
parties: analysisResult.parties,
|
||||
source: 'llm',
|
||||
},
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
} catch (error) {
|
||||
console.warn('LLM contract review failed, falling back to mock:', (error as Error).message)
|
||||
// Fall through to mock findings
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: Mock analysis results
|
||||
const mockFindings: Finding[] = [
|
||||
{
|
||||
id: uuidv4(),
|
||||
@@ -152,6 +228,7 @@ export async function POST(
|
||||
{ de: 'Meldefrist auf 24-48h verkürzen', en: 'Reduce notification deadline to 24-48h' },
|
||||
{ de: 'TIA für USA-Transfer durchführen', en: 'Conduct TIA for USA transfer' },
|
||||
],
|
||||
source: 'mock',
|
||||
},
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user