feat: Investor Agent — FAQ als LLM-Kontext statt Direkt-Streaming
Architektur-Umbau: FAQ-Antworten werden NICHT mehr direkt gestreamt. Stattdessen werden die Top-3 relevanten FAQ-Einträge als Kontext ans LLM übergeben. Das LLM interpretiert die Frage, kombiniert mehrere FAQs bei komplexen Fragen und antwortet natürlich.

Vorher:  Frage → Keyword-Match → FAQ direkt streamen (LLM umgangen)
Nachher: Frage → Top-3 FAQ-Matches → LLM-Prompt als Kontext → LLM antwortet

Neue Funktionen:
- matchFAQMultiple(): Top-N Matches statt nur bester
- buildFAQContext(): Baut Kontext-String für LLM-Injection
- faqContext statt faqAnswer im Request-Body
- System-Prompt Anweisung: "Kombiniere bei Bedarf, natürlicher Fließtext"

Behebt: Komplexe Fragen mit 2+ Themen werden jetzt korrekt beantwortet

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -157,42 +157,12 @@ ${JSON.stringify(features.rows, null, 2)}
 export async function POST(request: NextRequest) {
   try {
     const body = await request.json()
-    const { message, history = [], lang = 'de', slideContext, faqAnswer } = body
+    const { message, history = [], lang = 'de', slideContext, faqContext } = body
 
     if (!message || typeof message !== 'string') {
       return NextResponse.json({ error: 'Message is required' }, { status: 400 })
     }
 
-    // FAQ shortcut: if client sends a pre-cached FAQ answer, stream it directly (no LLM call)
-    if (faqAnswer && typeof faqAnswer === 'string') {
-      const encoder = new TextEncoder()
-      const stream = new ReadableStream({
-        start(controller) {
-          // Stream the FAQ answer in chunks for consistent UX
-          const words = faqAnswer.split(' ')
-          let i = 0
-          const interval = setInterval(() => {
-            if (i < words.length) {
-              const chunk = (i === 0 ? '' : ' ') + words[i]
-              controller.enqueue(encoder.encode(chunk))
-              i++
-            } else {
-              clearInterval(interval)
-              controller.close()
-            }
-          }, 30)
-        },
-      })
-
-      return new NextResponse(stream, {
-        headers: {
-          'Content-Type': 'text/plain; charset=utf-8',
-          'Cache-Control': 'no-cache',
-          'Connection': 'keep-alive',
-        },
-      })
-    }
-
     const pitchContext = await loadPitchContext()
 
     let systemContent = SYSTEM_PROMPT
@@ -200,6 +170,11 @@ export async function POST(request: NextRequest) {
       systemContent += '\n' + pitchContext
     }
 
+    // FAQ context: relevant pre-researched answers as basis for the LLM
+    if (faqContext && typeof faqContext === 'string') {
+      systemContent += '\n' + faqContext
+    }
+
     // Slide context for contextual awareness
     if (slideContext) {
       const visited: number[] = slideContext.visitedSlides || []
Reference in New Issue
Block a user