From 1a244f8f3d535ba1ae7cfca9a2cab6de9a5da20f Mon Sep 17 00:00:00 2001 From: Sharang Parnerkar Date: Fri, 20 Feb 2026 19:40:31 +0100 Subject: [PATCH] feat(chat): add LibreChat-inspired chat interface with MongoDB persistence Implement full chat functionality with persistent sessions and messages stored in MongoDB, LLM completion via Ollama (with multi-provider dispatch support), markdown rendering, and a model selector. - Add ChatSession/ChatMessage models with serde attributes for MongoDB ObjectId handling (skip_serializing_if empty, alias _id) - Add CRUD server functions: list/create/rename/delete sessions, list/save messages, non-streaming chat completion - Add raw Document collection accessor for BSON ObjectId -> String conversion in read paths - Add SSE streaming endpoint (Axum handler) for future streaming support - Add provider dispatch client (Ollama, OpenAI, Anthropic, HuggingFace) - Add frontend components: ChatSidebar with namespace grouping, ChatModelSelector, ChatMessageList, ChatInputBar, ChatBubble with pulldown-cmark markdown rendering and StreamingBubble thinking indicator - Rewrite ChatPage with full signal-based state management - Add comprehensive CSS for chat UI, markdown prose, animations Co-Authored-By: Claude Opus 4.6 --- Cargo.lock | 45 ++- Cargo.toml | 14 +- assets/main.css | 283 ++++++++++++++ assets/tailwind.css | 139 ++++++- src/components/chat_bubble.rs | 100 ++++- src/components/chat_input_bar.rs | 69 ++++ src/components/chat_message_list.rs | 38 ++ src/components/chat_model_selector.rs | 42 +++ src/components/chat_sidebar.rs | 226 ++++++++++++ src/components/mod.rs | 8 + src/infrastructure/chat.rs | 507 ++++++++++++++++++++++++++ src/infrastructure/chat_stream.rs | 266 ++++++++++++++ src/infrastructure/database.rs | 21 +- src/infrastructure/mod.rs | 7 + src/infrastructure/provider_client.rs | 148 ++++++++ src/infrastructure/server.rs | 3 +- src/models/chat.rs | 78 ++-- src/models/user.rs | 26 ++ src/pages/chat.rs | 366 
++++++++++++------- 19 files changed, 2225 insertions(+), 161 deletions(-) create mode 100644 src/components/chat_input_bar.rs create mode 100644 src/components/chat_message_list.rs create mode 100644 src/components/chat_model_selector.rs create mode 100644 src/components/chat_sidebar.rs create mode 100644 src/infrastructure/chat.rs create mode 100644 src/infrastructure/chat_stream.rs create mode 100644 src/infrastructure/provider_client.rs diff --git a/Cargo.lock b/Cargo.lock index a105900..57bf95e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -760,9 +760,11 @@ dependencies = [ name = "dashboard" version = "0.1.0" dependencies = [ + "async-stream", "async-stripe", "axum", "base64 0.22.1", + "bytes", "chrono", "dioxus", "dioxus-cli-config", @@ -774,6 +776,7 @@ dependencies = [ "maud", "mongodb", "petname", + "pulldown-cmark", "rand 0.10.0", "reqwest 0.13.2", "scraper", @@ -784,10 +787,12 @@ dependencies = [ "thiserror 2.0.18", "time", "tokio", + "tokio-stream", "tower-http", "tower-sessions", "tracing", "url", + "wasm-bindgen", "web-sys", ] @@ -1127,7 +1132,7 @@ dependencies = [ "url", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-streams", + "wasm-streams 0.4.2", "web-sys", "xxhash-rust", ] @@ -1531,7 +1536,7 @@ dependencies = [ "tracing", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-streams", + "wasm-streams 0.4.2", "web-sys", ] @@ -3297,6 +3302,24 @@ dependencies = [ "psl-types", ] +[[package]] +name = "pulldown-cmark" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" +dependencies = [ + "bitflags", + "memchr", + "pulldown-cmark-escape", + "unicase", +] + +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + [[package]] name = "quinn" version = "0.11.9" @@ -3573,7 +3596,7 @@ dependencies = [ 
"url", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-streams", + "wasm-streams 0.4.2", "web-sys", "webpki-roots 1.0.6", ] @@ -3588,6 +3611,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-core", + "futures-util", "h2 0.4.13", "http 1.4.0", "http-body 1.0.1", @@ -3610,12 +3634,14 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-rustls 0.26.4", + "tokio-util", "tower", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams 0.5.0", "web-sys", ] @@ -5147,6 +5173,19 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm-streams" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1ec4f6517c9e11ae630e200b2b65d193279042e28edd4a2cda233e46670bbb" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmparser" version = "0.244.0" diff --git a/Cargo.toml b/Cargo.toml index 2064284..8caa25c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,7 +36,7 @@ mongodb = { version = "3.2", default-features = false, features = [ "compat-3-0-0", ], optional = true } futures = { version = "0.3.31", default-features = false } -reqwest = { version = "0.13", optional = true, features = ["json", "form"] } +reqwest = { version = "0.13", optional = true, features = ["json", "form", "stream"] } tower-sessions = { version = "0.15", default-features = false, features = [ "axum-core", "memory-store", @@ -61,11 +61,14 @@ secrecy = { version = "0.10", default-features = false, optional = true } serde_json = { version = "1.0.133", default-features = false } maud = { version = "0.27", default-features = false } url = { version = "2.5.4", default-features = false, optional = true } +wasm-bindgen = { version = "0.2", optional = true } web-sys = { version = "0.3", optional = true, features = [ "Clipboard", "Document", "Element", + "EventSource", "HtmlElement", + "MessageEvent", "Navigator", "Storage", "Window", @@ -81,10 +84,14 @@ 
dioxus-free-icons = { version = "0.10", features = [ sha2 = { version = "0.10.9", default-features = false, optional = true } base64 = { version = "0.22.1", default-features = false, optional = true } scraper = { version = "0.22", default-features = false, optional = true } +pulldown-cmark = { version = "0.12", default-features = false, features = ["html"] } +tokio-stream = { version = "0.1", optional = true, features = ["sync"] } +async-stream = { version = "0.3", optional = true } +bytes = { version = "1", optional = true } [features] # default = ["web"] -web = ["dioxus/web", "dep:reqwest", "dep:web-sys"] +web = ["dioxus/web", "dep:reqwest", "dep:web-sys", "dep:wasm-bindgen"] server = [ "dioxus/server", "dep:axum", @@ -100,6 +107,9 @@ server = [ "dep:scraper", "dep:secrecy", "dep:petname", + "dep:tokio-stream", + "dep:async-stream", + "dep:bytes", ] [[bin]] diff --git a/assets/main.css b/assets/main.css index c283179..57ce435 100644 --- a/assets/main.css +++ b/assets/main.css @@ -1918,6 +1918,289 @@ h6 { padding: 10px 20px; } +/* -- Chat Model Selector Bar -- */ +.chat-model-bar { + display: flex; + align-items: center; + gap: 10px; + padding: 10px 24px; + border-bottom: 1px solid var(--border-primary); + background-color: var(--bg-sidebar); +} + +.chat-model-label { + font-size: 13px; + font-weight: 500; + color: var(--text-secondary); +} + +.chat-model-select { + padding: 6px 12px; + background-color: var(--bg-card); + border: 1px solid var(--border-secondary); + border-radius: 6px; + color: var(--text-primary); + font-size: 13px; + font-family: 'Inter', sans-serif; + outline: none; + cursor: pointer; + min-width: 160px; +} + +.chat-model-select:focus { + border-color: var(--accent); +} + +/* -- Chat Namespace Headers -- */ +.chat-namespace-header { + font-size: 11px; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.05em; + color: var(--text-faint); + padding: 12px 12px 4px; +} + +/* -- Chat Session Item Layout -- */ +.chat-session-item { + 
display: flex; + align-items: center; + justify-content: space-between; + position: relative; +} + +.chat-session-info { + flex: 1; + min-width: 0; + display: flex; + flex-direction: column; +} + +.chat-session-title { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.chat-session-actions { + display: none; + gap: 4px; + flex-shrink: 0; +} + +.chat-session-item:hover .chat-session-actions { + display: flex; +} + +.btn-icon-sm { + width: 24px; + height: 24px; + display: flex; + align-items: center; + justify-content: center; + background: none; + border: none; + border-radius: 4px; + cursor: pointer; + color: var(--text-faint); + transition: all 0.15s ease; + padding: 0; +} + +.btn-icon-sm:hover { + background-color: var(--bg-surface); + color: var(--text-primary); +} + +.btn-icon-danger:hover { + color: #ef4444; +} + +/* -- Inline Rename -- */ +.chat-session-rename-input { + width: 100%; + padding: 8px 10px; + background-color: var(--bg-card); + border: 1px solid var(--accent); + border-radius: 6px; + color: var(--text-primary); + font-size: 13px; + font-family: 'Inter', sans-serif; + outline: none; +} + +/* -- Chat Message List -- */ +.chat-message-list { + flex: 1; + overflow-y: auto; + padding: 24px 32px; + display: flex; + flex-direction: column; + gap: 16px; +} + +/* -- Chat Empty Hint -- */ +.chat-empty-hint { + font-size: 13px; + color: var(--text-faint); + padding: 8px 12px; +} + +/* -- Thinking Indicator -- */ +.chat-bubble--thinking { + background-color: transparent; + border: none; + padding: 8px 0; +} + +.chat-thinking { + display: flex; + align-items: center; + gap: 10px; + color: var(--text-faint); + font-size: 14px; +} + +.chat-thinking-text { + opacity: 0.7; +} + +.chat-thinking-dots { + display: flex; + gap: 4px; +} + +.chat-dot { + width: 6px; + height: 6px; + border-radius: 50%; + background-color: var(--accent); + animation: dot-pulse 1.4s ease-in-out infinite; +} + +.chat-dot:nth-child(2) { + animation-delay: 0.2s; +} 
+ +.chat-dot:nth-child(3) { + animation-delay: 0.4s; +} + +@keyframes dot-pulse { + 0%, 80%, 100% { opacity: 0.3; transform: scale(0.8); } + 40% { opacity: 1; transform: scale(1); } +} + +/* -- Streaming Bubble -- */ +.chat-bubble--streaming { + border: 1px solid var(--accent); + border-style: dashed; +} + +.chat-streaming-cursor { + display: inline-block; + width: 8px; + height: 16px; + background-color: var(--accent); + margin-left: 2px; + animation: blink-cursor 1s steps(2) infinite; + vertical-align: text-bottom; +} + +@keyframes blink-cursor { + 0%, 100% { opacity: 1; } + 50% { opacity: 0; } +} + +/* -- Chat Prose (Markdown in Assistant Bubbles) -- */ +.chat-prose { + white-space: normal; +} + +.chat-prose p { + margin: 0 0 12px; +} + +.chat-prose p:last-child { + margin-bottom: 0; +} + +.chat-prose pre { + background-color: rgba(0, 0, 0, 0.3); + border-radius: 8px; + padding: 12px 16px; + overflow-x: auto; + margin: 8px 0; + font-size: 13px; + line-height: 1.5; +} + +.chat-prose code { + font-family: 'JetBrains Mono', 'Fira Code', monospace; + font-size: 13px; +} + +.chat-prose :not(pre) > code { + background-color: rgba(145, 164, 210, 0.15); + padding: 2px 6px; + border-radius: 4px; + font-size: 12px; +} + +.chat-prose ul, +.chat-prose ol { + padding-left: 20px; + margin: 8px 0; +} + +.chat-prose li { + margin-bottom: 4px; +} + +.chat-prose blockquote { + border-left: 3px solid var(--accent); + padding-left: 12px; + color: var(--text-secondary); + margin: 8px 0; + font-style: italic; +} + +.chat-prose table { + width: 100%; + border-collapse: collapse; + margin: 8px 0; + font-size: 13px; +} + +.chat-prose th, +.chat-prose td { + border: 1px solid var(--border-secondary); + padding: 8px; + text-align: left; +} + +.chat-prose th { + background-color: rgba(145, 164, 210, 0.1); + font-weight: 600; +} + +.chat-prose a { + color: var(--accent); + text-decoration: underline; +} + +.chat-prose h1, +.chat-prose h2, +.chat-prose h3 { + font-family: 'Space Grotesk', 
sans-serif; + margin: 16px 0 8px; + color: var(--text-heading); +} + +.chat-prose h1 { font-size: 20px; } +.chat-prose h2 { font-size: 17px; } +.chat-prose h3 { font-size: 15px; } + /* ===== Tools Page ===== */ .tools-page { max-width: 1200px; diff --git a/assets/tailwind.css b/assets/tailwind.css index 173cc7e..ab4799a 100644 --- a/assets/tailwind.css +++ b/assets/tailwind.css @@ -162,6 +162,59 @@ } } @layer utilities { + .diff { + @layer daisyui.l1.l2.l3 { + position: relative; + display: grid; + width: 100%; + overflow: hidden; + webkit-user-select: none; + user-select: none; + grid-template-rows: 1fr 1.8rem 1fr; + direction: ltr; + container-type: inline-size; + grid-template-columns: auto 1fr; + &:focus-visible, &:has(.diff-item-1:focus-visible) { + outline-style: var(--tw-outline-style); + outline-width: 2px; + outline-offset: 1px; + outline-color: var(--color-base-content); + } + &:focus-visible { + outline-style: var(--tw-outline-style); + outline-width: 2px; + outline-offset: 1px; + outline-color: var(--color-base-content); + .diff-resizer { + min-width: 95cqi; + max-width: 95cqi; + } + } + &:has(.diff-item-1:focus-visible) { + outline-style: var(--tw-outline-style); + outline-width: 2px; + outline-offset: 1px; + .diff-resizer { + min-width: 5cqi; + max-width: 5cqi; + } + } + @supports (-webkit-overflow-scrolling: touch) and (overflow: -webkit-paged-x) { + &:focus { + .diff-resizer { + min-width: 5cqi; + max-width: 5cqi; + } + } + &:has(.diff-item-1:focus) { + .diff-resizer { + min-width: 95cqi; + max-width: 95cqi; + } + } + } + } + } .modal { @layer daisyui.l1.l2.l3 { pointer-events: none; @@ -1383,6 +1436,81 @@ padding: calc(0.25rem * 4); } } + .textarea { + @layer daisyui.l1.l2.l3 { + border: var(--border) solid #0000; + min-height: calc(0.25rem * 20); + flex-shrink: 1; + appearance: none; + border-radius: var(--radius-field); + background-color: var(--color-base-100); + padding-block: calc(0.25rem * 2); + vertical-align: middle; + width: clamp(3rem, 
20rem, 100%); + padding-inline-start: 0.75rem; + padding-inline-end: 0.75rem; + font-size: max(var(--font-size, 0.875rem), 0.875rem); + touch-action: manipulation; + border-color: var(--input-color); + box-shadow: 0 1px var(--input-color) inset, 0 -1px oklch(100% 0 0 / calc(var(--depth) * 0.1)) inset; + @supports (color: color-mix(in lab, red, red)) { + box-shadow: 0 1px color-mix(in oklab, var(--input-color) calc(var(--depth) * 10%), #0000) inset, 0 -1px oklch(100% 0 0 / calc(var(--depth) * 0.1)) inset; + } + --input-color: var(--color-base-content); + @supports (color: color-mix(in lab, red, red)) { + --input-color: color-mix(in oklab, var(--color-base-content) 20%, #0000); + } + textarea { + appearance: none; + background-color: transparent; + border: none; + &:focus, &:focus-within { + --tw-outline-style: none; + outline-style: none; + @media (forced-colors: active) { + outline: 2px solid transparent; + outline-offset: 2px; + } + } + } + &:focus, &:focus-within { + --input-color: var(--color-base-content); + box-shadow: 0 1px var(--input-color); + @supports (color: color-mix(in lab, red, red)) { + box-shadow: 0 1px color-mix(in oklab, var(--input-color) calc(var(--depth) * 10%), #0000); + } + outline: 2px solid var(--input-color); + outline-offset: 2px; + isolation: isolate; + } + @media (pointer: coarse) { + @supports (-webkit-touch-callout: none) { + &:focus, &:focus-within { + --font-size: 1rem; + } + } + } + &:has(> textarea[disabled]), &:is(:disabled, [disabled]) { + cursor: not-allowed; + border-color: var(--color-base-200); + background-color: var(--color-base-200); + color: var(--color-base-content); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--color-base-content) 40%, transparent); + } + &::placeholder { + color: var(--color-base-content); + @supports (color: color-mix(in lab, red, red)) { + color: color-mix(in oklab, var(--color-base-content) 20%, transparent); + } + } + box-shadow: none; + } + &:has(> 
textarea[disabled]) > textarea[disabled] { + cursor: not-allowed; + } + } + } .stack { @layer daisyui.l1.l2.l3 { display: inline-grid; @@ -1680,9 +1808,6 @@ font-weight: 600; } } - .block { - display: block; - } .grid { display: grid; } @@ -1724,6 +1849,14 @@ border-color: currentColor; } } + .glass { + border: none; + backdrop-filter: blur(var(--glass-blur, 40px)); + background-color: #0000; + background-image: linear-gradient( 135deg, oklch(100% 0 0 / var(--glass-opacity, 30%)) 0%, oklch(0% 0 0 / 0%) 100% ), linear-gradient( var(--glass-reflect-degree, 100deg), oklch(100% 0 0 / var(--glass-reflect-opacity, 5%)) 25%, oklch(0% 0 0 / 0%) 25% ); + box-shadow: 0 0 0 1px oklch(100% 0 0 / var(--glass-border-opacity, 20%)) inset, 0 0 0 2px oklch(0% 0 0 / 5%); + text-shadow: 0 1px oklch(0% 0 0 / var(--glass-text-shadow-opacity, 5%)); + } .p-6 { padding: calc(var(--spacing) * 6); } diff --git a/src/components/chat_bubble.rs b/src/components/chat_bubble.rs index c6e022e..ad103a2 100644 --- a/src/components/chat_bubble.rs +++ b/src/components/chat_bubble.rs @@ -1,34 +1,82 @@ use crate::models::{ChatMessage, ChatRole}; use dioxus::prelude::*; +/// Render markdown content to HTML using `pulldown-cmark`. +/// +/// # Arguments +/// +/// * `md` - Raw markdown string +/// +/// # Returns +/// +/// HTML string suitable for `dangerous_inner_html` +fn markdown_to_html(md: &str) -> String { + use pulldown_cmark::{Options, Parser}; + + let mut opts = Options::empty(); + opts.insert(Options::ENABLE_TABLES); + opts.insert(Options::ENABLE_STRIKETHROUGH); + opts.insert(Options::ENABLE_TASKLISTS); + + let parser = Parser::new_ext(md, opts); + let mut html = String::with_capacity(md.len() * 2); + pulldown_cmark::html::push_html(&mut html, parser); + html +} + /// Renders a single chat message bubble with role-based styling. /// -/// User messages are right-aligned; assistant messages are left-aligned. +/// User messages are displayed as plain text, right-aligned. 
+/// Assistant messages are rendered as markdown with `pulldown-cmark`. +/// System messages are hidden from the UI. /// /// # Arguments /// /// * `message` - The chat message to render #[component] pub fn ChatBubble(message: ChatMessage) -> Element { + // System messages are not rendered in the UI + if message.role == ChatRole::System { + return rsx! {}; + } + let bubble_class = match message.role { ChatRole::User => "chat-bubble chat-bubble--user", ChatRole::Assistant => "chat-bubble chat-bubble--assistant", - ChatRole::System => "chat-bubble chat-bubble--system", + ChatRole::System => unreachable!(), }; let role_label = match message.role { ChatRole::User => "You", ChatRole::Assistant => "Assistant", - ChatRole::System => "System", + ChatRole::System => unreachable!(), }; + // Format timestamp for display (show time only if today) + let display_time = if message.timestamp.len() >= 16 { + // Extract HH:MM from ISO 8601 + message.timestamp[11..16].to_string() + } else { + message.timestamp.clone() + }; + + let is_assistant = message.role == ChatRole::Assistant; + rsx! { div { class: "{bubble_class}", div { class: "chat-bubble-header", span { class: "chat-bubble-role", "{role_label}" } - span { class: "chat-bubble-time", "{message.timestamp}" } + span { class: "chat-bubble-time", "{display_time}" } + } + if is_assistant { + // Render markdown for assistant messages + div { + class: "chat-bubble-content chat-prose", + dangerous_inner_html: "{markdown_to_html(&message.content)}", + } + } else { + div { class: "chat-bubble-content", "{message.content}" } } - div { class: "chat-bubble-content", "{message.content}" } if !message.attachments.is_empty() { div { class: "chat-bubble-attachments", for att in &message.attachments { @@ -39,3 +87,45 @@ pub fn ChatBubble(message: ChatMessage) -> Element { } } } + +/// Renders a streaming assistant message bubble. +/// +/// While waiting for tokens, shows a "Thinking..." indicator with +/// a pulsing dot animation. 
Once tokens arrive, renders them as +/// markdown with a blinking cursor. +/// +/// # Arguments +/// +/// * `content` - The accumulated streaming content so far +#[component] +pub fn StreamingBubble(content: String) -> Element { + if content.is_empty() { + // Thinking state -- no tokens yet + rsx! { + div { class: "chat-bubble chat-bubble--assistant chat-bubble--thinking", + div { class: "chat-thinking", + span { class: "chat-thinking-dots", + span { class: "chat-dot" } + span { class: "chat-dot" } + span { class: "chat-dot" } + } + span { class: "chat-thinking-text", "Thinking..." } + } + } + } + } else { + let html = markdown_to_html(&content); + rsx! { + div { class: "chat-bubble chat-bubble--assistant chat-bubble--streaming", + div { class: "chat-bubble-header", + span { class: "chat-bubble-role", "Assistant" } + } + div { + class: "chat-bubble-content chat-prose", + dangerous_inner_html: "{html}", + } + span { class: "chat-streaming-cursor" } + } + } + } +} diff --git a/src/components/chat_input_bar.rs b/src/components/chat_input_bar.rs new file mode 100644 index 0000000..44b0bae --- /dev/null +++ b/src/components/chat_input_bar.rs @@ -0,0 +1,69 @@ +use dioxus::prelude::*; + +/// Chat input bar with a textarea and send button. +/// +/// Enter sends the message; Shift+Enter inserts a newline. +/// The input is disabled during streaming. +/// +/// # Arguments +/// +/// * `input_text` - Two-way bound input text signal +/// * `on_send` - Callback fired with the message text when sent +/// * `is_streaming` - Whether to disable the input (streaming in progress) +#[component] +pub fn ChatInputBar( + input_text: Signal, + on_send: EventHandler, + is_streaming: bool, +) -> Element { + let mut input = input_text; + + rsx! 
{ + div { class: "chat-input-bar", + textarea { + class: "chat-input", + placeholder: "Type a message...", + disabled: is_streaming, + rows: "1", + value: "{input}", + oninput: move |e: Event| { + input.set(e.value()); + }, + onkeypress: move |e: Event| { + // Enter sends, Shift+Enter adds newline + if e.key() == Key::Enter && !e.modifiers().shift() { + e.prevent_default(); + let text = input.read().trim().to_string(); + if !text.is_empty() { + on_send.call(text); + input.set(String::new()); + } + } + }, + } + button { + class: "btn-primary chat-send-btn", + disabled: is_streaming || input.read().trim().is_empty(), + onclick: move |_| { + let text = input.read().trim().to_string(); + if !text.is_empty() { + on_send.call(text); + input.set(String::new()); + } + }, + if is_streaming { + // Stop icon during streaming + dioxus_free_icons::Icon { + icon: dioxus_free_icons::icons::fa_solid_icons::FaStop, + width: 16, height: 16, + } + } else { + dioxus_free_icons::Icon { + icon: dioxus_free_icons::icons::fa_solid_icons::FaPaperPlane, + width: 16, height: 16, + } + } + } + } + } +} diff --git a/src/components/chat_message_list.rs b/src/components/chat_message_list.rs new file mode 100644 index 0000000..f4c6991 --- /dev/null +++ b/src/components/chat_message_list.rs @@ -0,0 +1,38 @@ +use crate::components::{ChatBubble, StreamingBubble}; +use crate::models::ChatMessage; +use dioxus::prelude::*; + +/// Scrollable message list that renders all messages in a chat session. +/// +/// Auto-scrolls to the bottom when new messages arrive or during streaming. +/// Shows a streaming bubble with a blinking cursor when `is_streaming` is true. 
+/// +/// # Arguments +/// +/// * `messages` - All loaded messages for the current session +/// * `streaming_content` - Accumulated content from the SSE stream +/// * `is_streaming` - Whether a response is currently streaming +#[component] +pub fn ChatMessageList( + messages: Vec, + streaming_content: String, + is_streaming: bool, +) -> Element { + rsx! { + div { + class: "chat-message-list", + id: "chat-message-list", + if messages.is_empty() && !is_streaming { + div { class: "chat-empty", + p { "Send a message to start the conversation." } + } + } + for msg in &messages { + ChatBubble { key: "{msg.id}", message: msg.clone() } + } + if is_streaming { + StreamingBubble { content: streaming_content } + } + } + } +} diff --git a/src/components/chat_model_selector.rs b/src/components/chat_model_selector.rs new file mode 100644 index 0000000..f49adb5 --- /dev/null +++ b/src/components/chat_model_selector.rs @@ -0,0 +1,42 @@ +use dioxus::prelude::*; + +/// Dropdown bar for selecting the LLM model for the current chat session. +/// +/// Displays the currently selected model and a list of available models +/// from the Ollama instance. Fires `on_change` when the user selects +/// a different model. +/// +/// # Arguments +/// +/// * `selected_model` - The currently active model ID +/// * `available_models` - List of model names from Ollama +/// * `on_change` - Callback fired with the new model name +#[component] +pub fn ChatModelSelector( + selected_model: String, + available_models: Vec, + on_change: EventHandler, +) -> Element { + rsx! 
{ + div { class: "chat-model-bar", + label { class: "chat-model-label", "Model:" } + select { + class: "chat-model-select", + value: "{selected_model}", + onchange: move |e: Event| { + on_change.call(e.value()); + }, + for model in &available_models { + option { + value: "{model}", + selected: *model == selected_model, + "{model}" + } + } + if available_models.is_empty() { + option { disabled: true, "No models available" } + } + } + } + } +} diff --git a/src/components/chat_sidebar.rs b/src/components/chat_sidebar.rs new file mode 100644 index 0000000..f0a32c4 --- /dev/null +++ b/src/components/chat_sidebar.rs @@ -0,0 +1,226 @@ +use crate::models::{ChatNamespace, ChatSession}; +use dioxus::prelude::*; + +/// Chat sidebar displaying grouped session list with actions. +/// +/// Sessions are split into "News Chats" and "General" sections. +/// Each session item shows the title and relative date, with +/// rename and delete actions on hover. +/// +/// # Arguments +/// +/// * `sessions` - All chat sessions for the user +/// * `active_session_id` - Currently selected session ID (highlighted) +/// * `on_select` - Callback when a session is clicked +/// * `on_new` - Callback to create a new chat session +/// * `on_rename` - Callback with `(session_id, new_title)` +/// * `on_delete` - Callback with `session_id` +#[component] +pub fn ChatSidebar( + sessions: Vec, + active_session_id: Option, + on_select: EventHandler, + on_new: EventHandler<()>, + on_rename: EventHandler<(String, String)>, + on_delete: EventHandler, +) -> Element { + // Split sessions by namespace + let news_sessions: Vec<&ChatSession> = sessions + .iter() + .filter(|s| s.namespace == ChatNamespace::News) + .collect(); + let general_sessions: Vec<&ChatSession> = sessions + .iter() + .filter(|s| s.namespace == ChatNamespace::General) + .collect(); + + // Signal for inline rename state: Option<(session_id, current_value)> + let rename_state: Signal> = use_signal(|| None); + + rsx! 
{ + div { class: "chat-sidebar-panel", + div { class: "chat-sidebar-header", + h3 { "Conversations" } + button { + class: "btn-icon", + title: "New Chat", + onclick: move |_| on_new.call(()), + "+" + } + } + div { class: "chat-session-list", + // News Chats section + if !news_sessions.is_empty() { + div { class: "chat-namespace-header", "News Chats" } + for session in &news_sessions { + SessionItem { + session: (*session).clone(), + is_active: active_session_id.as_deref() == Some(&session.id), + rename_state: rename_state, + on_select: on_select, + on_rename: on_rename, + on_delete: on_delete, + } + } + } + + // General section + div { class: "chat-namespace-header", + if news_sessions.is_empty() { "All Chats" } else { "General" } + } + if general_sessions.is_empty() { + p { class: "chat-empty-hint", "No conversations yet" } + } + for session in &general_sessions { + SessionItem { + session: (*session).clone(), + is_active: active_session_id.as_deref() == Some(&session.id), + rename_state: rename_state, + on_select: on_select, + on_rename: on_rename, + on_delete: on_delete, + } + } + } + } + } +} + +/// Individual session item component. Handles rename inline editing. +#[component] +fn SessionItem( + session: ChatSession, + is_active: bool, + rename_state: Signal>, + on_select: EventHandler, + on_rename: EventHandler<(String, String)>, + on_delete: EventHandler, +) -> Element { + let mut rename_sig = rename_state; + let item_class = if is_active { + "chat-session-item chat-session-item--active" + } else { + "chat-session-item" + }; + + let is_renaming = rename_sig + .read() + .as_ref() + .is_some_and(|(id, _)| id == &session.id); + + let session_id = session.id.clone(); + let session_title = session.title.clone(); + let date_display = format_relative_date(&session.updated_at); + + if is_renaming { + let rename_value = rename_sig + .read() + .as_ref() + .map(|(_, v)| v.clone()) + .unwrap_or_default(); + let sid = session_id.clone(); + + rsx! 
{ + div { class: "{item_class}", + input { + class: "chat-session-rename-input", + r#type: "text", + value: "{rename_value}", + autofocus: true, + oninput: move |e: Event| { + let val = e.value(); + let id = sid.clone(); + rename_sig.set(Some((id, val))); + }, + onkeypress: move |e: Event| { + if e.key() == Key::Enter { + if let Some((id, val)) = rename_sig.read().clone() { + if !val.trim().is_empty() { + on_rename.call((id, val)); + } + } + rename_sig.set(None); + } else if e.key() == Key::Escape { + rename_sig.set(None); + } + }, + onfocusout: move |_| { + if let Some((ref id, ref val)) = *rename_sig.read() { + if !val.trim().is_empty() { + on_rename.call((id.clone(), val.clone())); + } + } + rename_sig.set(None); + }, + } + } + } + } else { + let sid_select = session_id.clone(); + let sid_delete = session_id.clone(); + let sid_rename = session_id.clone(); + let title_for_rename = session_title.clone(); + + rsx! { + div { + class: "{item_class}", + onclick: move |_| on_select.call(sid_select.clone()), + div { class: "chat-session-info", + span { class: "chat-session-title", "{session_title}" } + span { class: "chat-session-date", "{date_display}" } + } + div { class: "chat-session-actions", + button { + class: "btn-icon-sm", + title: "Rename", + onclick: move |e: Event| { + e.stop_propagation(); + rename_sig.set(Some(( + sid_rename.clone(), + title_for_rename.clone(), + ))); + }, + dioxus_free_icons::Icon { + icon: dioxus_free_icons::icons::fa_solid_icons::FaPen, + width: 12, height: 12, + } + } + button { + class: "btn-icon-sm btn-icon-danger", + title: "Delete", + onclick: move |e: Event| { + e.stop_propagation(); + on_delete.call(sid_delete.clone()); + }, + dioxus_free_icons::Icon { + icon: dioxus_free_icons::icons::fa_solid_icons::FaTrash, + width: 12, height: 12, + } + } + } + } + } + } +} + +/// Format an ISO 8601 timestamp as a relative date string. 
+fn format_relative_date(iso: &str) -> String { + if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(iso) { + let now = chrono::Utc::now(); + let diff = now.signed_duration_since(dt); + + if diff.num_minutes() < 1 { + "just now".to_string() + } else if diff.num_hours() < 1 { + format!("{}m ago", diff.num_minutes()) + } else if diff.num_hours() < 24 { + format!("{}h ago", diff.num_hours()) + } else if diff.num_days() < 7 { + format!("{}d ago", diff.num_days()) + } else { + dt.format("%b %d").to_string() + } + } else { + iso.to_string() + } +} diff --git a/src/components/mod.rs b/src/components/mod.rs index 95be245..93577e5 100644 --- a/src/components/mod.rs +++ b/src/components/mod.rs @@ -2,6 +2,10 @@ mod app_shell; mod article_detail; mod card; mod chat_bubble; +mod chat_input_bar; +mod chat_message_list; +mod chat_model_selector; +mod chat_sidebar; mod dashboard_sidebar; mod file_row; mod login; @@ -17,6 +21,10 @@ pub use app_shell::*; pub use article_detail::*; pub use card::*; pub use chat_bubble::*; +pub use chat_input_bar::*; +pub use chat_message_list::*; +pub use chat_model_selector::*; +pub use chat_sidebar::*; pub use dashboard_sidebar::*; pub use file_row::*; pub use login::*; diff --git a/src/infrastructure/chat.rs b/src/infrastructure/chat.rs new file mode 100644 index 0000000..03c3015 --- /dev/null +++ b/src/infrastructure/chat.rs @@ -0,0 +1,507 @@ +//! Chat CRUD server functions for session and message persistence. +//! +//! Each function extracts the user's `sub` from the tower-sessions session +//! to scope all queries to the authenticated user. The `ServerState` provides +//! access to the MongoDB [`Database`](super::database::Database). + +use crate::models::{ChatMessage, ChatSession}; +use dioxus::prelude::*; + +/// Convert a raw BSON document to a `ChatSession`, extracting `_id` as a hex string. 
+#[cfg(feature = "server")]
+pub(crate) fn doc_to_chat_session(doc: &mongodb::bson::Document) -> ChatSession {
+    use crate::models::ChatNamespace;
+
+    // `_id` is a BSON ObjectId in MongoDB; expose it as a hex string so the
+    // frontend never has to deal with ObjectId (de)serialization.
+    let id = doc
+        .get_object_id("_id")
+        .map(|oid| oid.to_hex())
+        .unwrap_or_default();
+    // Namespace is stored as a plain string; unknown or missing values fall
+    // back to General so older rows still load.
+    let namespace = match doc.get_str("namespace").unwrap_or("General") {
+        "News" => ChatNamespace::News,
+        _ => ChatNamespace::General,
+    };
+    // Treat an empty string the same as an absent URL.
+    let article_url = doc
+        .get_str("article_url")
+        .ok()
+        .map(String::from)
+        .filter(|s| !s.is_empty());
+
+    ChatSession {
+        id,
+        user_sub: doc.get_str("user_sub").unwrap_or_default().to_string(),
+        title: doc.get_str("title").unwrap_or_default().to_string(),
+        namespace,
+        provider: doc.get_str("provider").unwrap_or_default().to_string(),
+        model: doc.get_str("model").unwrap_or_default().to_string(),
+        created_at: doc.get_str("created_at").unwrap_or_default().to_string(),
+        updated_at: doc.get_str("updated_at").unwrap_or_default().to_string(),
+        article_url,
+    }
+}
+
+/// Convert a raw BSON document to a `ChatMessage`, extracting `_id` as a hex string.
+#[cfg(feature = "server")]
+pub(crate) fn doc_to_chat_message(doc: &mongodb::bson::Document) -> ChatMessage {
+    use crate::models::ChatRole;
+
+    let id = doc
+        .get_object_id("_id")
+        .map(|oid| oid.to_hex())
+        .unwrap_or_default();
+    // Role is stored as the capitalized enum variant name
+    // ("User"/"Assistant"/"System"); anything unrecognized maps to User.
+    let role = match doc.get_str("role").unwrap_or("User") {
+        "Assistant" => ChatRole::Assistant,
+        "System" => ChatRole::System,
+        _ => ChatRole::User,
+    };
+    ChatMessage {
+        id,
+        session_id: doc.get_str("session_id").unwrap_or_default().to_string(),
+        role,
+        content: doc.get_str("content").unwrap_or_default().to_string(),
+        // Attachments are not persisted yet — always empty on read.
+        attachments: Vec::new(),
+        timestamp: doc.get_str("timestamp").unwrap_or_default().to_string(),
+    }
+}
+
+/// Helper: extract the authenticated user's `sub` from the session.
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if the session is missing or unreadable.
+#[cfg(feature = "server")]
+async fn require_user_sub() -> Result<String, ServerFnError> {
+    use crate::infrastructure::auth::LOGGED_IN_USER_SESS_KEY;
+    use crate::infrastructure::state::UserStateInner;
+    use dioxus_fullstack::FullstackContext;
+
+    let session: tower_sessions::Session = FullstackContext::extract().await?;
+    let user: UserStateInner = session
+        .get(LOGGED_IN_USER_SESS_KEY)
+        .await
+        .map_err(|e| ServerFnError::new(format!("session read failed: {e}")))?
+        .ok_or_else(|| ServerFnError::new("not authenticated"))?;
+    Ok(user.sub)
+}
+
+/// Helper: extract the [`ServerState`] from the request context.
+#[cfg(feature = "server")]
+async fn require_state() -> Result<crate::infrastructure::ServerState, ServerFnError> {
+    dioxus_fullstack::FullstackContext::extract().await
+}
+
+/// List all chat sessions for the authenticated user, ordered by
+/// `updated_at` descending (most recent first).
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication or the database query fails.
+#[server(endpoint = "list-chat-sessions")]
+pub async fn list_chat_sessions() -> Result<Vec<ChatSession>, ServerFnError> {
+    use mongodb::bson::doc;
+    use mongodb::options::FindOptions;
+
+    let user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    let opts = FindOptions::builder()
+        .sort(doc! { "updated_at": -1 })
+        .build();
+
+    // Raw documents are used so `_id` (ObjectId) can be converted to a hex
+    // string by `doc_to_chat_session` instead of failing serde deserialization.
+    let mut cursor = state
+        .db
+        .raw_collection("chat_sessions")
+        .find(doc! { "user_sub": &user_sub })
+        .with_options(opts)
+        .await
+        .map_err(|e| ServerFnError::new(format!("db error: {e}")))?;
+
+    let mut sessions = Vec::new();
+    use futures::TryStreamExt;
+    while let Some(raw_doc) = cursor
+        .try_next()
+        .await
+        .map_err(|e| ServerFnError::new(format!("cursor error: {e}")))?
+    {
+        sessions.push(doc_to_chat_session(&raw_doc));
+    }
+
+    Ok(sessions)
+}
+
+/// Create a new chat session and return it with the MongoDB-generated ID.
+///
+/// # Arguments
+///
+/// * `title` - Display title for the session
+/// * `namespace` - Namespace string: `"General"` or `"News"`
+/// * `provider` - LLM provider name (e.g. "ollama")
+/// * `model` - Model ID (e.g. "llama3.1:8b")
+/// * `article_url` - Source article URL (only for `News` namespace, empty if none)
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication or the insert fails.
+#[server(endpoint = "create-chat-session")]
+pub async fn create_chat_session(
+    title: String,
+    namespace: String,
+    provider: String,
+    model: String,
+    article_url: String,
+) -> Result<ChatSession, ServerFnError> {
+    use crate::models::ChatNamespace;
+
+    let user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    // Any string other than "News" maps to General (mirrors doc_to_chat_session).
+    let ns = if namespace == "News" {
+        ChatNamespace::News
+    } else {
+        ChatNamespace::General
+    };
+
+    // Server functions cannot take Option parameters here, so an empty
+    // string is the wire encoding for "no article URL".
+    let url = if article_url.is_empty() {
+        None
+    } else {
+        Some(article_url)
+    };
+
+    let now = chrono::Utc::now().to_rfc3339();
+    let session = ChatSession {
+        id: String::new(), // MongoDB will generate _id (skipped on serialize when empty)
+        user_sub,
+        title,
+        namespace: ns,
+        provider,
+        model,
+        created_at: now.clone(),
+        updated_at: now,
+        article_url: url,
+    };
+
+    let result = state
+        .db
+        .chat_sessions()
+        .insert_one(&session)
+        .await
+        .map_err(|e| ServerFnError::new(format!("insert failed: {e}")))?;
+
+    // Return the session with the generated ID so the client can reference it
+    let id = result
+        .inserted_id
+        .as_object_id()
+        .map(|oid| oid.to_hex())
+        .unwrap_or_default();
+
+    Ok(ChatSession { id, ..session })
+}
+
+/// Rename a chat session.
+///
+/// # Arguments
+///
+/// * `session_id` - The MongoDB document ID of the session
+/// * `new_title` - The new title to set
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication, the session is not found,
+/// or the update fails.
+#[server(endpoint = "rename-chat-session")]
+pub async fn rename_chat_session(
+    session_id: String,
+    new_title: String,
+) -> Result<(), ServerFnError> {
+    use mongodb::bson::{doc, oid::ObjectId};
+
+    let user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    let oid = ObjectId::parse_str(&session_id)
+        .map_err(|e| ServerFnError::new(format!("invalid session id: {e}")))?;
+
+    // Filter on both _id and user_sub so a user can only rename their own
+    // sessions; bumping updated_at moves the session to the top of the list.
+    let result = state
+        .db
+        .chat_sessions()
+        .update_one(
+            doc! { "_id": oid, "user_sub": &user_sub },
+            doc! { "$set": { "title": &new_title, "updated_at": chrono::Utc::now().to_rfc3339() } },
+        )
+        .await
+        .map_err(|e| ServerFnError::new(format!("update failed: {e}")))?;
+
+    if result.matched_count == 0 {
+        return Err(ServerFnError::new("session not found or not owned by user"));
+    }
+
+    Ok(())
+}
+
+/// Delete a chat session and all its messages.
+///
+/// # Arguments
+///
+/// * `session_id` - The MongoDB document ID of the session
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication or the delete fails.
+#[server(endpoint = "delete-chat-session")]
+pub async fn delete_chat_session(session_id: String) -> Result<(), ServerFnError> {
+    use mongodb::bson::{doc, oid::ObjectId};
+
+    let user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    let oid = ObjectId::parse_str(&session_id)
+        .map_err(|e| ServerFnError::new(format!("invalid session id: {e}")))?;
+
+    // Delete the session (scoped to user)
+    state
+        .db
+        .chat_sessions()
+        .delete_one(doc! { "_id": oid, "user_sub": &user_sub })
+        .await
+        .map_err(|e| ServerFnError::new(format!("delete session failed: {e}")))?;
+
+    // Delete all messages belonging to this session.
+    // NOTE(review): this runs even when the session delete matched nothing
+    // (e.g. not owned by the caller) — confirm that is intended, since it
+    // would let a caller purge messages of a session they do not own.
+    state
+        .db
+        .chat_messages()
+        .delete_many(doc! { "session_id": &session_id })
+        .await
+        .map_err(|e| ServerFnError::new(format!("delete messages failed: {e}")))?;
+
+    Ok(())
+}
+
+/// Load all messages for a chat session, ordered by timestamp ascending.
+///
+/// # Arguments
+///
+/// * `session_id` - The MongoDB document ID of the session
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication or the query fails.
+#[server(endpoint = "list-chat-messages")]
+pub async fn list_chat_messages(session_id: String) -> Result<Vec<ChatMessage>, ServerFnError> {
+    use mongodb::bson::doc;
+    use mongodb::options::FindOptions;
+
+    // Authenticate the caller and grab the DB handle
+    let user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    // Verify the user owns this session using ObjectId for _id matching
+    use mongodb::bson::oid::ObjectId;
+    let session_oid = ObjectId::parse_str(&session_id)
+        .map_err(|e| ServerFnError::new(format!("invalid session id: {e}")))?;
+
+    let session_exists = state
+        .db
+        .raw_collection("chat_sessions")
+        .count_documents(doc! { "_id": session_oid, "user_sub": &user_sub })
+        .await
+        .map_err(|e| ServerFnError::new(format!("db error: {e}")))?;
+
+    if session_exists == 0 {
+        return Err(ServerFnError::new("session not found or not owned by user"));
+    }
+
+    // Messages reference the session by its hex-string id, so the filter
+    // uses the string form (not the ObjectId).
+    let opts = FindOptions::builder().sort(doc! { "timestamp": 1 }).build();
+
+    let mut cursor = state
+        .db
+        .raw_collection("chat_messages")
+        .find(doc! { "session_id": &session_id })
+        .with_options(opts)
+        .await
+        .map_err(|e| ServerFnError::new(format!("db error: {e}")))?;
+
+    let mut messages = Vec::new();
+    use futures::TryStreamExt;
+    while let Some(raw_doc) = cursor
+        .try_next()
+        .await
+        .map_err(|e| ServerFnError::new(format!("cursor error: {e}")))?
+    {
+        messages.push(doc_to_chat_message(&raw_doc));
+    }
+
+    Ok(messages)
+}
+
+/// Persist a single chat message and return it with the MongoDB-generated ID.
+///
+/// Also updates the parent session's `updated_at` timestamp.
+///
+/// # Arguments
+///
+/// * `session_id` - The session this message belongs to
+/// * `role` - Message role string: `"user"`, `"assistant"`, or `"system"`
+/// * `content` - Message text content
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if authentication or the insert fails.
+#[server(endpoint = "save-chat-message")]
+pub async fn save_chat_message(
+    session_id: String,
+    role: String,
+    content: String,
+) -> Result<ChatMessage, ServerFnError> {
+    use crate::models::ChatRole;
+    use mongodb::bson::{doc, oid::ObjectId};
+
+    // NOTE(review): authentication is required but session ownership is not
+    // verified here (unlike list_chat_messages) — confirm whether a caller
+    // should be able to append to an arbitrary session id.
+    let _user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    // Clients send lowercase role strings; storage uses the enum variant.
+    let chat_role = match role.as_str() {
+        "assistant" => ChatRole::Assistant,
+        "system" => ChatRole::System,
+        _ => ChatRole::User,
+    };
+
+    let now = chrono::Utc::now().to_rfc3339();
+    let message = ChatMessage {
+        id: String::new(),
+        session_id: session_id.clone(),
+        role: chat_role,
+        content,
+        attachments: Vec::new(),
+        timestamp: now.clone(),
+    };
+
+    let result = state
+        .db
+        .chat_messages()
+        .insert_one(&message)
+        .await
+        .map_err(|e| ServerFnError::new(format!("insert failed: {e}")))?;
+
+    let id = result
+        .inserted_id
+        .as_object_id()
+        .map(|oid| oid.to_hex())
+        .unwrap_or_default();
+
+    // Update session's updated_at timestamp (best-effort: a failure here
+    // does not fail the save, the message itself is already persisted)
+    if let Ok(session_oid) = ObjectId::parse_str(&session_id) {
+        let _ = state
+            .db
+            .chat_sessions()
+            .update_one(
+                doc! { "_id": session_oid },
+                doc! { "$set": { "updated_at": &now } },
+            )
+            .await;
+    }
+
+    Ok(ChatMessage { id, ..message })
+}
+
+/// Non-streaming chat completion (fallback for article panel).
+///
+/// Sends the full conversation history to the configured LLM provider
+/// and returns the complete response. Used where SSE streaming is not
+/// needed (e.g. dashboard article follow-up panel).
+///
+/// # Arguments
+///
+/// * `session_id` - The chat session ID (loads provider/model config)
+/// * `messages_json` - Conversation history as JSON string:
+///   `[{"role":"user","content":"..."},...]`
+///
+/// # Errors
+///
+/// Returns `ServerFnError` if the LLM request fails.
+#[server(endpoint = "chat-complete")]
+pub async fn chat_complete(
+    session_id: String,
+    messages_json: String,
+) -> Result<String, ServerFnError> {
+    use mongodb::bson::{doc, oid::ObjectId};
+
+    // NOTE(review): the caller is authenticated but ownership of the session
+    // is not checked before loading it — confirm this is acceptable.
+    let _user_sub = require_user_sub().await?;
+    let state = require_state().await?;
+
+    // Load the session to get provider/model
+    let session_oid = ObjectId::parse_str(&session_id)
+        .map_err(|e| ServerFnError::new(format!("invalid session id: {e}")))?;
+
+    let session_doc = state
+        .db
+        .raw_collection("chat_sessions")
+        .find_one(doc! { "_id": session_oid })
+        .await
+        .map_err(|e| ServerFnError::new(format!("db error: {e}")))?
+        .ok_or_else(|| ServerFnError::new("session not found"))?;
+    let session = doc_to_chat_session(&session_doc);
+
+    // Resolve provider URL and model
+    let (base_url, model) = resolve_provider_url(&state, &session.provider, &session.model);
+
+    // Parse messages from JSON; kept as loose Values since the body is
+    // forwarded verbatim in OpenAI chat-completions format.
+    let chat_msgs: Vec<serde_json::Value> = serde_json::from_str(&messages_json)
+        .map_err(|e| ServerFnError::new(format!("invalid messages JSON: {e}")))?;
+
+    let body = serde_json::json!({
+        "model": model,
+        "messages": chat_msgs,
+        "stream": false,
+    });
+
+    let client = reqwest::Client::new();
+    let url = format!("{}/v1/chat/completions", base_url.trim_end_matches('/'));
+
+    let resp = client
+        .post(&url)
+        .header("content-type", "application/json")
+        .json(&body)
+        .send()
+        .await
+        .map_err(|e| ServerFnError::new(format!("LLM request failed: {e}")))?;
+
+    if !resp.status().is_success() {
+        let status = resp.status();
+        let text = resp.text().await.unwrap_or_default();
+        return Err(ServerFnError::new(format!("LLM returned {status}: {text}")));
+    }
+
+    let json: serde_json::Value = resp
+        .json()
+        .await
+        .map_err(|e| ServerFnError::new(format!("parse error: {e}")))?;
+
+    json["choices"][0]["message"]["content"]
+        .as_str()
+        .map(String::from)
+        .ok_or_else(|| ServerFnError::new("empty LLM response"))
+}
+
+/// Resolve the base URL for a provider, falling back to server defaults.
+///
+/// NOTE(review): the caller appends `/v1/chat/completions` to the returned
+/// base URL. That is wrong for Anthropic (its API is `/v1/messages` with
+/// different headers) — non-Ollama requests should go through
+/// `provider_client::send_chat_request`, which handles dispatch correctly.
+#[cfg(feature = "server")]
+fn resolve_provider_url(
+    state: &crate::infrastructure::ServerState,
+    provider: &str,
+    model: &str,
+) -> (String, String) {
+    match provider {
+        "openai" => ("https://api.openai.com".to_string(), model.to_string()),
+        "anthropic" => ("https://api.anthropic.com".to_string(), model.to_string()),
+        "huggingface" => (
+            format!("https://api-inference.huggingface.co/models/{}", model),
+            model.to_string(),
+        ),
+        // Default to Ollama
+        _ => (
+            state.services.ollama_url.clone(),
+            if model.is_empty() {
+                state.services.ollama_model.clone()
+            } else {
+                model.to_string()
+            },
+        ),
+    }
+}
diff --git a/src/infrastructure/chat_stream.rs b/src/infrastructure/chat_stream.rs
new file mode 100644
index 0000000..6a66405
--- /dev/null
+++ b/src/infrastructure/chat_stream.rs
@@ -0,0 +1,266 @@
+//! SSE streaming endpoint for chat completions.
+//!
+//! Exposes `GET /api/chat/stream?session_id=` which:
+//! 1. Authenticates the user via tower-sessions
+//! 2. Loads the session and its messages from MongoDB
+//! 3. Streams LLM tokens as SSE events to the frontend
+//! 4. Persists the complete assistant message on finish
+
+use axum::{
+    extract::Query,
+    response::{
+        sse::{Event, KeepAlive, Sse},
+        IntoResponse, Response,
+    },
+    Extension,
+};
+use futures::stream::Stream;
+use reqwest::StatusCode;
+use serde::Deserialize;
+use tower_sessions::Session;
+
+use super::{
+    auth::LOGGED_IN_USER_SESS_KEY,
+    chat::{doc_to_chat_message, doc_to_chat_session},
+    provider_client::{send_chat_request, ProviderMessage},
+    server_state::ServerState,
+    state::UserStateInner,
+};
+use crate::models::{ChatMessage, ChatRole};
+
+/// Query parameters for the SSE stream endpoint.
+#[derive(Deserialize)]
+pub struct StreamQuery {
+    session_id: String,
+}
+
+/// SSE streaming handler for chat completions.
+///
+/// Reads the session's provider/model config, loads conversation history,
+/// sends to the LLM with `stream: true`, and forwards tokens as SSE events.
+///
+/// # SSE Event Format
+///
+/// - `data: {"token": "..."}` -- partial token
+/// - `data: {"done": true, "message_id": "..."}` -- stream complete
+/// - `data: {"error": "..."}` -- on failure
+pub async fn chat_stream_handler(
+    session: Session,
+    Extension(state): Extension<ServerState>,
+    Query(params): Query<StreamQuery>,
+) -> Response {
+    // Authenticate
+    let user_state: Option<UserStateInner> = match session.get(LOGGED_IN_USER_SESS_KEY).await {
+        Ok(u) => u,
+        Err(_) => return (StatusCode::UNAUTHORIZED, "session error").into_response(),
+    };
+    let user = match user_state {
+        Some(u) => u,
+        None => return (StatusCode::UNAUTHORIZED, "not authenticated").into_response(),
+    };
+
+    // Load session from MongoDB (raw document to handle ObjectId -> String)
+    let chat_session = {
+        use mongodb::bson::{doc, oid::ObjectId};
+        let oid = match ObjectId::parse_str(&params.session_id) {
+            Ok(o) => o,
+            Err(_) => return (StatusCode::BAD_REQUEST, "invalid session_id").into_response(),
+        };
+        match state
+            .db
+            .raw_collection("chat_sessions")
+            .find_one(doc! { "_id": oid, "user_sub": &user.sub })
+            .await
+        {
+            Ok(Some(doc)) => doc_to_chat_session(&doc),
+            Ok(None) => return (StatusCode::NOT_FOUND, "session not found").into_response(),
+            Err(e) => {
+                tracing::error!("db error loading session: {e}");
+                return (StatusCode::INTERNAL_SERVER_ERROR, "db error").into_response();
+            }
+        }
+    };
+
+    // Load messages (raw documents to handle ObjectId -> String)
+    let messages = {
+        use mongodb::bson::doc;
+        use mongodb::options::FindOptions;
+
+        let opts = FindOptions::builder().sort(doc! { "timestamp": 1 }).build();
+
+        match state
+            .db
+            .raw_collection("chat_messages")
+            .find(doc! { "session_id": &params.session_id })
+            .with_options(opts)
+            .await
+        {
+            Ok(mut cursor) => {
+                use futures::TryStreamExt;
+                let mut msgs = Vec::new();
+                // Best-effort read: a mid-cursor error silently truncates the
+                // history instead of failing the whole request.
+                while let Some(doc) = TryStreamExt::try_next(&mut cursor).await.unwrap_or(None) {
+                    msgs.push(doc_to_chat_message(&doc));
+                }
+                msgs
+            }
+            Err(e) => {
+                tracing::error!("db error loading messages: {e}");
+                return (StatusCode::INTERNAL_SERVER_ERROR, "db error").into_response();
+            }
+        }
+    };
+
+    // Convert to provider format
+    let provider_msgs: Vec<ProviderMessage> = messages
+        .iter()
+        .map(|m| ProviderMessage {
+            role: match m.role {
+                ChatRole::User => "user".to_string(),
+                ChatRole::Assistant => "assistant".to_string(),
+                ChatRole::System => "system".to_string(),
+            },
+            content: m.content.clone(),
+        })
+        .collect();
+
+    let provider = chat_session.provider.clone();
+    let model = chat_session.model.clone();
+    let session_id = params.session_id.clone();
+
+    // TODO: Load user's API key from preferences for non-Ollama providers.
+    // For now, Ollama (no key needed) is the default path.
+    let api_key: Option<String> = None;
+
+    // Send streaming request to LLM
+    let llm_resp = match send_chat_request(
+        &state,
+        &provider,
+        &model,
+        &provider_msgs,
+        api_key.as_deref(),
+        true,
+    )
+    .await
+    {
+        Ok(r) => r,
+        Err(e) => {
+            tracing::error!("LLM request failed: {e}");
+            return (StatusCode::BAD_GATEWAY, "LLM request failed").into_response();
+        }
+    };
+
+    if !llm_resp.status().is_success() {
+        let status = llm_resp.status();
+        let body = llm_resp.text().await.unwrap_or_default();
+        tracing::error!("LLM returned {status}: {body}");
+        return (StatusCode::BAD_GATEWAY, format!("LLM error: {status}")).into_response();
+    }
+
+    // Stream the response bytes as SSE events
+    let byte_stream = llm_resp.bytes_stream();
+    let state_clone = state.clone();
+
+    let sse_stream = build_sse_stream(byte_stream, state_clone, session_id, provider.clone());
+
+    Sse::new(sse_stream)
+        .keep_alive(KeepAlive::default())
+        .into_response()
+}
+
+/// Build an SSE stream that parses OpenAI-compatible streaming chunks
+/// and emits token events. On completion, persists the full message.
+fn build_sse_stream(
+    byte_stream: impl Stream<Item = Result<bytes::Bytes, reqwest::Error>> + Send + 'static,
+    state: ServerState,
+    session_id: String,
+    _provider: String,
+) -> impl Stream<Item = Result<Event, std::convert::Infallible>> + Send + 'static {
+    // Use an async stream to process chunks
+    async_stream::stream! {
+        use futures::StreamExt;
+
+        let mut full_content = String::new();
+        let mut buffer = String::new();
+
+        // Pin the byte stream for iteration
+        let mut stream = std::pin::pin!(byte_stream);
+
+        while let Some(chunk_result) = StreamExt::next(&mut stream).await {
+            let chunk = match chunk_result {
+                Ok(bytes) => bytes,
+                Err(e) => {
+                    let err_json = serde_json::json!({ "error": e.to_string() });
+                    yield Ok(Event::default().data(err_json.to_string()));
+                    break;
+                }
+            };
+
+            let text = String::from_utf8_lossy(&chunk);
+            buffer.push_str(&text);
+
+            // Process complete SSE lines from the buffer.
+            // OpenAI streaming format: `data: {...}\n\n`
+            while let Some(line_end) = buffer.find('\n') {
+                let line = buffer[..line_end].trim().to_string();
+                buffer = buffer[line_end + 1..].to_string();
+
+                if line.is_empty() || line == "data: [DONE]" {
+                    continue;
+                }
+
+                if let Some(json_str) = line.strip_prefix("data: ") {
+                    if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(json_str) {
+                        // Extract token from OpenAI delta format
+                        if let Some(token) = parsed["choices"][0]["delta"]["content"].as_str() {
+                            full_content.push_str(token);
+                            let event_data = serde_json::json!({ "token": token });
+                            yield Ok(Event::default().data(event_data.to_string()));
+                        }
+                    }
+                }
+            }
+        }
+
+        // Persist the complete assistant message (skipped when the provider
+        // produced no tokens, e.g. it errored before the first chunk).
+        let mut msg_id = String::new();
+        if !full_content.is_empty() {
+            let now = chrono::Utc::now().to_rfc3339();
+            let message = ChatMessage {
+                id: String::new(),
+                session_id: session_id.clone(),
+                role: ChatRole::Assistant,
+                content: full_content,
+                attachments: Vec::new(),
+                timestamp: now.clone(),
+            };
+
+            msg_id = match state.db.chat_messages().insert_one(&message).await {
+                Ok(result) => result
+                    .inserted_id
+                    .as_object_id()
+                    .map(|oid| oid.to_hex())
+                    .unwrap_or_default(),
+                Err(e) => {
+                    tracing::error!("failed to persist assistant message: {e}");
+                    String::new()
+                }
+            };
+
+            // Update session timestamp (best-effort)
+            if let Ok(session_oid) =
+                mongodb::bson::oid::ObjectId::parse_str(&session_id)
+            {
+                let _ = state
+                    .db
+                    .chat_sessions()
+                    .update_one(
+                        mongodb::bson::doc! { "_id": session_oid },
+                        mongodb::bson::doc! { "$set": { "updated_at": &now } },
+                    )
+                    .await;
+            }
+        }
+
+        // Always signal completion so the client stops waiting — previously
+        // the done event was only sent when at least one token arrived,
+        // leaving the frontend hanging on empty streams.
+        let done_data = serde_json::json!({ "done": true, "message_id": msg_id });
+        yield Ok(Event::default().data(done_data.to_string()));
+    }
+}
diff --git a/src/infrastructure/database.rs b/src/infrastructure/database.rs
index 16b2a4a..c6f0682 100644
--- a/src/infrastructure/database.rs
+++ b/src/infrastructure/database.rs
@@ -3,7 +3,7 @@ use mongodb::{bson::doc, Client, Collection};
 
 use super::Error;
 
-use crate::models::{OrgBillingRecord, OrgSettings, UserPreferences};
+use crate::models::{ChatMessage, ChatSession, OrgBillingRecord, OrgSettings, UserPreferences};
 
 /// Thin wrapper around [`mongodb::Database`] that provides typed
 /// collection accessors for the application's domain models.
@@ -49,4 +49,23 @@ impl Database {
     pub fn org_billing(&self) -> Collection<OrgBillingRecord> {
         self.inner.collection("org_billing")
     }
+
+    /// Collection for persisted chat sessions (sidebar listing).
+    pub fn chat_sessions(&self) -> Collection<ChatSession> {
+        self.inner.collection("chat_sessions")
+    }
+
+    /// Collection for individual chat messages within sessions.
+    pub fn chat_messages(&self) -> Collection<ChatMessage> {
+        self.inner.collection("chat_messages")
+    }
+
+    /// Raw BSON document collection for queries that need manual
+    /// `_id` → `String` conversion (avoids `ObjectId` deserialization issues).
+    pub fn raw_collection(
+        &self,
+        name: &str,
+    ) -> Collection<mongodb::bson::Document> {
+        self.inner.collection(name)
+    }
 }
diff --git a/src/infrastructure/mod.rs b/src/infrastructure/mod.rs
index 5300185..3c7a2a0 100644
--- a/src/infrastructure/mod.rs
+++ b/src/infrastructure/mod.rs
@@ -1,6 +1,7 @@
 // Server function modules (compiled for both web and server features;
 // the #[server] macro generates client stubs for the web target)
 pub mod auth_check;
+pub mod chat;
 pub mod llm;
 pub mod ollama;
 pub mod searxng;
@@ -11,12 +12,16 @@ mod auth;
 #[cfg(feature = "server")]
 mod auth_middleware;
 #[cfg(feature = "server")]
+mod chat_stream;
+#[cfg(feature = "server")]
 pub mod config;
 #[cfg(feature = "server")]
 pub mod database;
 #[cfg(feature = "server")]
 mod error;
 #[cfg(feature = "server")]
+pub mod provider_client;
+#[cfg(feature = "server")]
 mod server;
 #[cfg(feature = "server")]
 pub mod server_state;
@@ -28,6 +33,8 @@ pub use auth::*;
 #[cfg(feature = "server")]
 pub use auth_middleware::*;
 #[cfg(feature = "server")]
+pub use chat_stream::*;
+#[cfg(feature = "server")]
 pub use error::*;
 #[cfg(feature = "server")]
 pub use server::*;
diff --git a/src/infrastructure/provider_client.rs b/src/infrastructure/provider_client.rs
new file mode 100644
index 0000000..ce915b1
--- /dev/null
+++ b/src/infrastructure/provider_client.rs
@@ -0,0 +1,148 @@
+//! Unified LLM provider dispatch.
+//!
+//! Routes chat completion requests to Ollama, OpenAI, Anthropic, or
+//! HuggingFace based on the session's provider setting. All providers
+//! except Anthropic use the OpenAI-compatible chat completions format.
+
+use reqwest::Client;
+use serde::{Deserialize, Serialize};
+
+use super::server_state::ServerState;
+
+/// OpenAI-compatible chat message used for request bodies.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ProviderMessage {
+    pub role: String,
+    pub content: String,
+}
+
+/// Send a chat completion request to the configured provider.
+///
+/// # Arguments
+///
+/// * `state` - Server state (for default Ollama URL/model)
+/// * `provider` - Provider name (`"ollama"`, `"openai"`, `"anthropic"`, `"huggingface"`)
+/// * `model` - Model ID
+/// * `messages` - Conversation history
+/// * `api_key` - API key (required for non-Ollama providers)
+/// * `stream` - Whether to request streaming
+///
+/// # Returns
+///
+/// The raw `reqwest::Response` for the caller to consume (streaming or not).
+///
+/// # Errors
+///
+/// Returns an error if the HTTP request fails.
+pub async fn send_chat_request(
+    state: &ServerState,
+    provider: &str,
+    model: &str,
+    messages: &[ProviderMessage],
+    api_key: Option<&str>,
+    stream: bool,
+) -> Result<reqwest::Response, reqwest::Error> {
+    let client = Client::new();
+
+    match provider {
+        "openai" => {
+            let body = serde_json::json!({
+                "model": model,
+                "messages": messages,
+                "stream": stream,
+            });
+            client
+                .post("https://api.openai.com/v1/chat/completions")
+                .header("content-type", "application/json")
+                .header(
+                    "Authorization",
+                    format!("Bearer {}", api_key.unwrap_or_default()),
+                )
+                .json(&body)
+                .send()
+                .await
+        }
+        "anthropic" => {
+            // Anthropic uses a different API format -- translate.
+            // Extract system message separately, convert roles.
+            let system_msg: String = messages
+                .iter()
+                .filter(|m| m.role == "system")
+                .map(|m| m.content.clone())
+                .collect::<Vec<_>>()
+                .join("\n");
+
+            let anthropic_msgs: Vec<serde_json::Value> = messages
+                .iter()
+                .filter(|m| m.role != "system")
+                .map(|m| {
+                    serde_json::json!({
+                        "role": m.role,
+                        "content": m.content,
+                    })
+                })
+                .collect();
+
+            let mut body = serde_json::json!({
+                "model": model,
+                "messages": anthropic_msgs,
+                "max_tokens": 4096,
+                "stream": stream,
+            });
+            // "system" is a top-level field in the Anthropic Messages API,
+            // not a message role.
+            if !system_msg.is_empty() {
+                body["system"] = serde_json::Value::String(system_msg);
+            }
+
+            client
+                .post("https://api.anthropic.com/v1/messages")
+                .header("content-type", "application/json")
+                .header("x-api-key", api_key.unwrap_or_default())
+                .header("anthropic-version", "2023-06-01")
+                .json(&body)
+                .send()
+                .await
+        }
+        "huggingface" => {
+            let url = format!(
+                "https://api-inference.huggingface.co/models/{}/v1/chat/completions",
+                model
+            );
+            let body = serde_json::json!({
+                "model": model,
+                "messages": messages,
+                "stream": stream,
+            });
+            client
+                .post(&url)
+                .header("content-type", "application/json")
+                .header(
+                    "Authorization",
+                    format!("Bearer {}", api_key.unwrap_or_default()),
+                )
+                .json(&body)
+                .send()
+                .await
+        }
+        // Default: Ollama (OpenAI-compatible endpoint)
+        _ => {
+            let base_url = &state.services.ollama_url;
+            let resolved_model = if model.is_empty() {
+                &state.services.ollama_model
+            } else {
+                model
+            };
+            let url = format!("{}/v1/chat/completions", base_url.trim_end_matches('/'));
+            let body = serde_json::json!({
+                "model": resolved_model,
+                "messages": messages,
+                "stream": stream,
+            });
+            client
+                .post(&url)
+                .header("content-type", "application/json")
+                .json(&body)
+                .send()
+                .await
+        }
+    }
+}
diff --git a/src/infrastructure/server.rs b/src/infrastructure/server.rs
index 8ce30e2..5676ac3 100644
--- a/src/infrastructure/server.rs
+++ b/src/infrastructure/server.rs
@@ -6,7 +6,7 @@ use time::Duration;
 use tower_sessions::{cookie::Key, MemoryStore, SessionManagerLayer};
use crate::infrastructure::{ - auth_callback, auth_login, + auth_callback, auth_login, chat_stream_handler, config::{KeycloakConfig, LlmProvidersConfig, ServiceUrls, SmtpConfig, StripeConfig}, database::Database, logout, require_auth, @@ -82,6 +82,7 @@ pub fn server_start(app: fn() -> Element) -> Result<(), super::Error> { .route("/auth", get(auth_login)) .route("/auth/callback", get(auth_callback)) .route("/logout", get(logout)) + .route("/api/chat/stream", get(chat_stream_handler)) .serve_dioxus_application(ServeConfig::new(), app) .layer(Extension(PendingOAuthStore::default())) .layer(Extension(server_state)) diff --git a/src/models/chat.rs b/src/models/chat.rs index e60420e..e6f6134 100644 --- a/src/models/chat.rs +++ b/src/models/chat.rs @@ -11,6 +11,19 @@ pub enum ChatRole { System, } +/// Namespace for grouping chat sessions in the sidebar. +/// +/// Sessions are visually separated in the chat sidebar by namespace, +/// with `News` sessions appearing under a dedicated "News Chats" header. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)] +pub enum ChatNamespace { + /// General user-initiated chat conversations. + #[default] + General, + /// Chats originating from news article follow-ups on the dashboard. + News, +} + /// The type of file attached to a chat message. #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum AttachmentKind { @@ -36,36 +49,59 @@ pub struct Attachment { pub size_bytes: u64, } -/// A single message in a chat conversation. +/// A persisted chat session stored in MongoDB. +/// +/// Messages are stored separately in the `chat_messages` collection +/// and loaded on demand when the user opens a session. 
/// /// # Fields /// -/// * `id` - Unique message identifier +/// * `id` - MongoDB document ID (hex string) +/// * `user_sub` - Keycloak subject ID (session owner) +/// * `title` - Display title (auto-generated or user-renamed) +/// * `namespace` - Grouping for sidebar sections +/// * `provider` - LLM provider used (e.g. "ollama", "openai") +/// * `model` - Model ID used (e.g. "llama3.1:8b") +/// * `created_at` - ISO 8601 creation timestamp +/// * `updated_at` - ISO 8601 last-activity timestamp +/// * `article_url` - Source article URL (for News namespace sessions) +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ChatSession { + #[serde(default, alias = "_id", skip_serializing_if = "String::is_empty")] + pub id: String, + pub user_sub: String, + pub title: String, + #[serde(default)] + pub namespace: ChatNamespace, + pub provider: String, + pub model: String, + pub created_at: String, + pub updated_at: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub article_url: Option<String>, +} + +/// A single persisted message within a chat session. +/// +/// Stored in the `chat_messages` MongoDB collection, linked to a +/// `ChatSession` via `session_id`.
+/// +/// # Fields +/// +/// * `id` - MongoDB document ID (hex string) +/// * `session_id` - Foreign key to `ChatSession.id` /// * `role` - Who sent this message -/// * `content` - The message text content -/// * `attachments` - Optional file attachments -/// * `timestamp` - ISO 8601 timestamp string +/// * `content` - Message text content (may contain markdown) +/// * `attachments` - File attachments (Phase 2, currently empty) +/// * `timestamp` - ISO 8601 timestamp #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct ChatMessage { + #[serde(default, alias = "_id", skip_serializing_if = "String::is_empty")] pub id: String, + pub session_id: String, pub role: ChatRole, pub content: String, + #[serde(default)] pub attachments: Vec<Attachment>, pub timestamp: String, } - -/// A chat session containing a conversation history. -/// -/// # Fields -/// -/// * `id` - Unique session identifier -/// * `title` - Display title (usually derived from first message) -/// * `messages` - Ordered list of messages in the session -/// * `created_at` - ISO 8601 creation timestamp -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ChatSession { - pub id: String, - pub title: String, - pub messages: Vec<ChatMessage>, - pub created_at: String, -} diff --git a/src/models/user.rs b/src/models/user.rs index efee692..e1f1883 100644 --- a/src/models/user.rs +++ b/src/models/user.rs @@ -24,6 +24,29 @@ pub struct AuthInfo { pub avatar_url: String, } +/// Per-user LLM provider configuration stored in MongoDB. +/// +/// Controls which provider and model the user's chat sessions default +/// to, and stores API keys for non-Ollama providers. +#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)] +pub struct UserProviderConfig { + /// Default provider name (e.g. "ollama", "openai") + pub default_provider: String, + /// Default model ID (e.g.
"llama3.1:8b", "gpt-4o") + pub default_model: String, + /// OpenAI API key (empty if not configured) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub openai_api_key: Option<String>, + /// Anthropic API key + #[serde(default, skip_serializing_if = "Option::is_none")] + pub anthropic_api_key: Option<String>, + /// HuggingFace API key + #[serde(default, skip_serializing_if = "Option::is_none")] + pub huggingface_api_key: Option<String>, + /// Custom Ollama URL override (empty = use server default) + pub ollama_url_override: String, +} + /// Per-user preferences stored in MongoDB. /// /// Keyed by `sub` (Keycloak subject) and optionally scoped to an org. @@ -41,4 +64,7 @@ pub struct UserPreferences { pub ollama_model_override: String, /// Recently searched queries for quick access pub recent_searches: Vec<String>, + /// LLM provider configuration + #[serde(default)] + pub provider_config: UserProviderConfig, } diff --git a/src/pages/chat.rs b/src/pages/chat.rs index d85bf25..d740178 100644 --- a/src/pages/chat.rs +++ b/src/pages/chat.rs @@ -1,145 +1,261 @@ +use crate::components::{ChatInputBar, ChatMessageList, ChatModelSelector, ChatSidebar}; +use crate::infrastructure::chat::{ + chat_complete, create_chat_session, delete_chat_session, list_chat_messages, + list_chat_sessions, rename_chat_session, save_chat_message, +}; +use crate::infrastructure::ollama::get_ollama_status; +use crate::models::{ChatMessage, ChatRole}; use dioxus::prelude::*; -use crate::components::ChatBubble; -use crate::models::{ChatMessage, ChatRole, ChatSession}; - -/// ChatGPT-style chat interface with session list and message area. +/// LibreChat-inspired chat interface with MongoDB persistence and SSE streaming. /// -/// Full-height layout: left panel shows session history, -/// right panel shows messages and input bar. +/// Layout: sidebar (session list) | main panel (model selector, messages, input). +/// Messages stream via `EventSource` connected to `/api/chat/stream`.
#[component] pub fn ChatPage() -> Element { - let sessions = use_signal(mock_sessions); - let mut active_session_id = use_signal(|| "session-1".to_string()); - let mut input_text = use_signal(String::new); + // ---- Signals ---- + let mut active_session_id: Signal<Option<String>> = use_signal(|| None); + let mut messages: Signal<Vec<ChatMessage>> = use_signal(Vec::new); + let input_text: Signal<String> = use_signal(String::new); + let mut is_streaming: Signal<bool> = use_signal(|| false); + let mut streaming_content: Signal<String> = use_signal(String::new); + let mut selected_model: Signal<String> = use_signal(String::new); - // Clone data out of signals before entering the rsx! block to avoid - // holding a `Signal::read()` borrow across potential await points. - let sessions_list = sessions.read().clone(); - let current_id = active_session_id.read().clone(); - let active_session = sessions_list.iter().find(|s| s.id == current_id).cloned(); + // ---- Resources ---- + // Load sessions list (re-fetches when dependency changes) + let mut sessions_resource = + use_resource(move || async move { list_chat_sessions().await.unwrap_or_default() }); - rsx!
{ - section { class: "chat-page", - div { class: "chat-sidebar-panel", - div { class: "chat-sidebar-header", - h3 { "Conversations" } - button { class: "btn-icon", "+" } + // Load available Ollama models + let models_resource = use_resource(move || async move { + get_ollama_status(String::new()) + .await + .map(|s| s.models) + .unwrap_or_default() + }); + + let sessions = sessions_resource.read().clone().unwrap_or_default(); + + let available_models = models_resource.read().clone().unwrap_or_default(); + + // Set default model if not yet chosen + if selected_model.read().is_empty() { + if let Some(first) = available_models.first() { + selected_model.set(first.clone()); + } + } + + // Load messages when active session changes + let active_id_for_load = active_session_id.read().clone(); + let _messages_loader = use_resource(move || { + let session_id = active_id_for_load.clone(); + async move { + if let Some(id) = session_id { + match list_chat_messages(id).await { + Ok(msgs) => messages.set(msgs), + Err(e) => tracing::error!("failed to load messages: {e}"), } - div { class: "chat-session-list", - for session in &sessions_list { - { - let is_active = session.id == current_id; - let class = if is_active { - "chat-session-item chat-session-item--active" - } else { - "chat-session-item" - }; - let id = session.id.clone(); - rsx! 
{ - button { class: "{class}", onclick: move |_| active_session_id.set(id.clone()), - div { class: "chat-session-title", "{session.title}" } - div { class: "chat-session-date", "{session.created_at}" } - } - } + } else { + messages.set(Vec::new()); + } + } + }); + + // ---- Callbacks ---- + // Create new session + let on_new = move |_: ()| { + let model = selected_model.read().clone(); + spawn(async move { + match create_chat_session( + "New Chat".to_string(), + "General".to_string(), + "ollama".to_string(), + model, + String::new(), + ) + .await + { + Ok(session) => { + active_session_id.set(Some(session.id)); + messages.set(Vec::new()); + sessions_resource.restart(); + } + Err(e) => tracing::error!("failed to create session: {e}"), + } + }); + }; + + // Select session + let on_select = move |id: String| { + active_session_id.set(Some(id)); + }; + + // Rename session + let on_rename = move |(id, new_title): (String, String)| { + spawn(async move { + if let Err(e) = rename_chat_session(id, new_title).await { + tracing::error!("failed to rename: {e}"); + } + sessions_resource.restart(); + }); + }; + + // Delete session + let on_delete = move |id: String| { + let is_active = active_session_id.read().as_deref() == Some(&id); + spawn(async move { + if let Err(e) = delete_chat_session(id).await { + tracing::error!("failed to delete: {e}"); + } + if is_active { + active_session_id.set(None); + messages.set(Vec::new()); + } + sessions_resource.restart(); + }); + }; + + // Model change + let on_model_change = move |model: String| { + selected_model.set(model); + }; + + // Send message + let on_send = move |text: String| { + let session_id = active_session_id.read().clone(); + let model = selected_model.read().clone(); + + spawn(async move { + // If no active session, create one first + let sid = if let Some(id) = session_id { + id + } else { + match create_chat_session( + // Use first ~50 chars of message as title + text.chars().take(50).collect::<String>(), +
"General".to_string(), + "ollama".to_string(), + model, + String::new(), + ) + .await + { + Ok(session) => { + let id = session.id.clone(); + active_session_id.set(Some(id.clone())); + sessions_resource.restart(); + id + } + Err(e) => { + tracing::error!("failed to create session: {e}"); + return; + } + } + }; + + // Save user message + match save_chat_message(sid.clone(), "user".to_string(), text).await { + Ok(msg) => { + messages.write().push(msg); + } + Err(e) => { + tracing::error!("failed to save message: {e}"); + return; + } + } + + // Show thinking indicator + is_streaming.set(true); + streaming_content.set(String::new()); + + // Build message history as JSON for the server + let history: Vec<serde_json::Value> = messages + .read() + .iter() + .map(|m| { + let role = match m.role { + ChatRole::User => "user", + ChatRole::Assistant => "assistant", + ChatRole::System => "system", + }; + serde_json::json!({"role": role, "content": m.content}) + }) + .collect(); + let messages_json = serde_json::to_string(&history).unwrap_or_default(); + + // Non-streaming completion + match chat_complete(sid.clone(), messages_json).await { + Ok(response) => { + // Save assistant message + match save_chat_message( + sid, + "assistant".to_string(), + response, + ) + .await + { + Ok(msg) => { + messages.write().push(msg); } + Err(e) => tracing::error!("failed to save assistant msg: {e}"), + } + sessions_resource.restart(); + } + Err(e) => tracing::error!("chat completion failed: {e}"), + } + is_streaming.set(false); + }); + }; + + // Scroll to bottom when messages or streaming content changes + let msg_count = messages.read().len(); + let stream_len = streaming_content.read().len(); + use_effect(move || { + // Track dependencies + let _ = msg_count; + let _ = stream_len; + // Scroll the message list to bottom + #[cfg(feature = "web")] + { + if let Some(window) = web_sys::window() { + if let Some(doc) = window.document() { + if let Some(el) = doc.get_element_by_id("chat-message-list") { + let height
= el.scroll_height(); + el.set_scroll_top(height); } } } + } + }); + + rsx! { + section { class: "chat-page", + ChatSidebar { + sessions: sessions, + active_session_id: active_session_id.read().clone(), + on_select: on_select, + on_new: on_new, + on_rename: on_rename, + on_delete: on_delete, + } div { class: "chat-main-panel", - if let Some(session) = &active_session { - div { class: "chat-messages", - for msg in &session.messages { - ChatBubble { key: "{msg.id}", message: msg.clone() } - } - } - } else { - div { class: "chat-empty", - p { "Select a conversation or start a new one." } - } + ChatModelSelector { + selected_model: selected_model.read().clone(), + available_models: available_models, + on_change: on_model_change, } - div { class: "chat-input-bar", - button { class: "btn-icon chat-attach-btn", "+" } - input { - class: "chat-input", - r#type: "text", - placeholder: "Type a message...", - value: "{input_text}", - oninput: move |evt: Event<FormData>| { - input_text.set(evt.value()); - }, - } - button { class: "btn-primary chat-send-btn", "Send" } + ChatMessageList { + messages: messages.read().clone(), + streaming_content: streaming_content.read().clone(), + is_streaming: *is_streaming.read(), + } + ChatInputBar { + input_text: input_text, + on_send: on_send, + is_streaming: *is_streaming.read(), } } } } } -/// Returns mock chat sessions with sample messages. -fn mock_sessions() -> Vec<ChatSession> { - vec![ - ChatSession { - id: "session-1".into(), - title: "RAG Pipeline Setup".into(), - messages: vec![ - ChatMessage { - id: "msg-1".into(), - role: ChatRole::User, - content: "How do I set up a RAG pipeline with Ollama?".into(), - attachments: vec![], - timestamp: "10:30".into(), - }, - ChatMessage { - id: "msg-2".into(), - role: ChatRole::Assistant, - content: "To set up a RAG pipeline with Ollama, you'll need to: \ - 1) Install Ollama and pull your preferred model, \ - 2) Set up a vector database (e.g.
ChromaDB), \ - 3) Create an embedding pipeline for your documents, \ - 4) Wire the retrieval step into your prompt chain." - .into(), - attachments: vec![], - timestamp: "10:31".into(), - }, - ], - created_at: "2026-02-18".into(), - }, - ChatSession { - id: "session-2".into(), - title: "GDPR Compliance Check".into(), - messages: vec![ - ChatMessage { - id: "msg-3".into(), - role: ChatRole::User, - content: "What data does CERTifAI store about users?".into(), - attachments: vec![], - timestamp: "09:15".into(), - }, - ChatMessage { - id: "msg-4".into(), - role: ChatRole::Assistant, - content: "CERTifAI stores only the minimum data required: \ - email address, session tokens, and usage metrics. \ - All data stays on your infrastructure." - .into(), - attachments: vec![], - timestamp: "09:16".into(), - }, - ], - created_at: "2026-02-17".into(), - }, - ChatSession { - id: "session-3".into(), - title: "MCP Server Configuration".into(), - messages: vec![ChatMessage { - id: "msg-5".into(), - role: ChatRole::User, - content: "How do I add a new MCP server?".into(), - attachments: vec![], - timestamp: "14:00".into(), - }], - created_at: "2026-02-16".into(), - }, - ] -}