Files
certifai/src/pages/dashboard.rs
Sharang Parnerkar 50237f5377
All checks were successful
CI / Format (push) Successful in 2s
CI / Clippy (push) Successful in 2m13s
CI / Security Audit (push) Successful in 1m37s
CI / Tests (push) Successful in 2m52s
CI / Deploy (push) Successful in 2s
feat(chat): added chat interface and connection to ollama (#10)
Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com>
Reviewed-on: #10
2026-02-20 19:40:25 +00:00

509 lines
25 KiB
Rust

use dioxus::prelude::*;
use dioxus_sdk::storage::use_persistent;
use crate::components::{ArticleDetail, DashboardSidebar, NewsCardView, PageHeader};
use crate::infrastructure::chat::{create_chat_session, save_chat_message};
use crate::infrastructure::llm::FollowUpMessage;
use crate::models::NewsCard;
/// Maximum number of recent searches to retain in localStorage
/// (older entries are truncated when a new search is recorded).
const MAX_RECENT_SEARCHES: usize = 10;

/// Default search topics always shown on the dashboard, inspired by
/// Perplexica. User-added topics are appended after these.
const DEFAULT_TOPICS: &[&str] = &[
    "AI",
    "Technology",
    "Science",
    "Finance",
    "Writing",
    "Research",
];
/// Dashboard page displaying AI news from SearXNG with topic-based filtering,
/// a split-view article detail panel, and LLM-powered summarization.
///
/// State is persisted across sessions using localStorage:
/// - `certifai_topics`: custom user-defined search topics
/// - `certifai_ollama_url`: Ollama instance URL for summarization
/// - `certifai_ollama_model`: Ollama model ID for summarization
/// - `certifai_recent_searches`: recent topic searches (capped at
///   [`MAX_RECENT_SEARCHES`])
#[component]
pub fn DashboardPage() -> Element {
    // --- Persistent state stored in localStorage ---
    let mut custom_topics = use_persistent("certifai_topics".to_string(), Vec::<String>::new);
    // Default to empty so the server functions use OLLAMA_URL / OLLAMA_MODEL
    // from .env. Only stores a non-empty value when the user explicitly saves
    // an override via the Settings panel.
    let mut ollama_url = use_persistent("certifai_ollama_url".to_string(), String::new);
    let mut ollama_model = use_persistent("certifai_ollama_model".to_string(), String::new);
    // --- Reactive signals for UI state ---
    let mut active_topic = use_signal(|| "AI".to_string());
    let mut selected_card = use_signal(|| Option::<NewsCard>::None);
    let mut summary = use_signal(|| Option::<String>::None);
    let mut is_summarizing = use_signal(|| false);
    let mut show_add_input = use_signal(|| false);
    let mut new_topic_text = use_signal(String::new);
    let mut show_settings = use_signal(|| false);
    // Draft values edited in the Settings panel; only copied into the
    // persistent ollama_url / ollama_model signals on "Save".
    let mut settings_url = use_signal(String::new);
    let mut settings_model = use_signal(String::new);
    // --- Chat follow-up state for the selected article ---
    let mut chat_messages = use_signal(Vec::<FollowUpMessage>::new);
    let mut is_chatting = use_signal(|| false);
    // Stores the article text + AI summary used as the chat system-message
    // context.
    let mut article_context = use_signal(String::new);
    // MongoDB session ID for persisting News chat (created lazily on the
    // first follow-up message).
    let mut news_session_id: Signal<Option<String>> = use_signal(|| None);
    // Recent search history, persisted in localStorage (capped at
    // MAX_RECENT_SEARCHES).
    let mut recent_searches =
        use_persistent("certifai_recent_searches".to_string(), Vec::<String>::new);
    // Build the complete topic list: defaults first, then any custom topics
    // not already present (dedup against defaults).
    let all_topics: Vec<String> = {
        let custom = custom_topics.read();
        let mut topics: Vec<String> = DEFAULT_TOPICS.iter().map(|s| (*s).to_string()).collect();
        for t in custom.iter() {
            if !topics.contains(t) {
                topics.push(t.clone());
            }
        }
        topics
    };
    // Fetch trending topics once on mount (no signal deps = runs once).
    // use_resource handles deduplication and won't re-fetch on re-renders.
    let trending_resource = use_resource(|| async {
        match crate::infrastructure::searxng::get_trending_topics().await {
            Ok(topics) => topics,
            Err(e) => {
                // Non-fatal: the sidebar simply shows no trending topics.
                tracing::error!("Failed to fetch trending topics: {e}");
                Vec::new()
            }
        }
    });
    // Push a topic to the front of recent searches (deduplicating, capped).
    // Defined as a closure so it can be called from multiple click handlers.
    let mut record_search = move |topic: &str| {
        let mut searches = recent_searches.read().clone();
        searches.retain(|t| t != topic);
        searches.insert(0, topic.to_string());
        searches.truncate(MAX_RECENT_SEARCHES);
        *recent_searches.write() = searches;
    };
    // Fetch news reactively when active_topic changes.
    // use_resource tracks the signal read inside the closure and only
    // re-fetches when active_topic actually changes -- unlike use_effect
    // which can re-fire on unrelated re-renders.
    let search_resource = use_resource(move || {
        let topic = active_topic.read().clone();
        async move { crate::infrastructure::searxng::search_topic(topic).await }
    });
    // Check if an article is selected for split view; drives the layout class
    // and whether the sidebar or the detail panel is rendered.
    let has_selection = selected_card.read().is_some();
    let container_class = if has_selection {
        "dashboard-split"
    } else {
        "dashboard-with-sidebar"
    };
    // Resolve trending from resource (empty while loading / on error).
    let trending_topics: Vec<String> = trending_resource
        .read()
        .as_ref()
        .cloned()
        .unwrap_or_default();
    // Resolve search state from resource: None = still loading,
    // Some(Err) = search failed (fall back to mock data), Some(Ok) = results.
    let search_state = search_resource.read();
    let is_loading = search_state.is_none();
    let search_error: Option<String> = search_state
        .as_ref()
        .and_then(|r| r.as_ref().err().map(|e| format!("Search failed: {e}")));
    let news_cards: Vec<NewsCard> = match search_state.as_ref() {
        Some(Ok(c)) => c.clone(),
        Some(Err(_)) => crate::components::news_card::mock_news(),
        None => Vec::new(),
    };
    // Drop the borrow before entering rsx! so signals can be written in
    // handlers without a read/write borrow conflict.
    drop(search_state);
    rsx! {
        section { class: "dashboard-page",
            PageHeader {
                title: "Dashboard".to_string(),
                subtitle: "AI news and updates".to_string(),
            }
            // Topic tabs row
            div { class: "dashboard-filters",
                for topic in &all_topics {
                    {
                        let is_active = *active_topic.read() == *topic;
                        let class_name = if is_active {
                            "filter-tab filter-tab--active"
                        } else {
                            "filter-tab"
                        };
                        // Only user-added topics get a remove ("x") button.
                        let is_custom = !DEFAULT_TOPICS.contains(&topic.as_str());
                        // Separate clones for each closure below (each
                        // `move` closure needs its own owned copy).
                        let topic_click = topic.clone();
                        let topic_remove = topic.clone();
                        rsx! {
                            div { class: "topic-tab-wrapper",
                                button {
                                    class: "{class_name}",
                                    onclick: move |_| {
                                        record_search(&topic_click);
                                        active_topic.set(topic_click.clone());
                                        // Switching topic clears any open
                                        // article / summary.
                                        selected_card.set(None);
                                        summary.set(None);
                                    },
                                    "{topic}"
                                }
                                if is_custom {
                                    button {
                                        class: "topic-remove",
                                        onclick: move |_| {
                                            let mut topics = custom_topics.read().clone();
                                            topics.retain(|t| *t != topic_remove);
                                            *custom_topics.write() = topics;
                                            // If we removed the active topic, reset
                                            if *active_topic.read() == topic_remove {
                                                active_topic.set("AI".to_string());
                                            }
                                        },
                                        "x"
                                    }
                                }
                            }
                        }
                    }
                }
                // Add topic button / inline input
                if *show_add_input.read() {
                    div { class: "topic-input-wrapper",
                        input {
                            class: "topic-input",
                            r#type: "text",
                            placeholder: "Topic name...",
                            value: "{new_topic_text}",
                            oninput: move |e| new_topic_text.set(e.value()),
                            onkeypress: move |e| {
                                if e.key() == Key::Enter {
                                    let val = new_topic_text.read().trim().to_string();
                                    if !val.is_empty() {
                                        let mut topics = custom_topics.read().clone();
                                        // Skip duplicates of both custom and
                                        // default topics.
                                        if !topics.contains(&val) && !DEFAULT_TOPICS.contains(&val.as_str()) {
                                            topics.push(val.clone());
                                            *custom_topics.write() = topics;
                                            record_search(&val);
                                            active_topic.set(val);
                                        }
                                    }
                                    // Reset the input either way.
                                    new_topic_text.set(String::new());
                                    show_add_input.set(false);
                                }
                            },
                        }
                        button {
                            class: "topic-cancel-btn",
                            onclick: move |_| {
                                show_add_input.set(false);
                                new_topic_text.set(String::new());
                            },
                            "Cancel"
                        }
                    }
                } else {
                    button {
                        class: "topic-add-btn",
                        onclick: move |_| show_add_input.set(true),
                        "+"
                    }
                }
                // Settings toggle: seed the draft fields from the persisted
                // values only when opening, so edits aren't lost on re-render.
                button {
                    class: "filter-tab settings-toggle",
                    onclick: move |_| {
                        let currently_shown = *show_settings.read();
                        if !currently_shown {
                            settings_url.set(ollama_url.read().clone());
                            settings_model.set(ollama_model.read().clone());
                        }
                        show_settings.set(!currently_shown);
                    },
                    "Settings"
                }
            }
            // Settings panel (collapsible)
            if *show_settings.read() {
                div { class: "settings-panel",
                    h4 { class: "settings-panel-title", "Ollama Settings" }
                    p { class: "settings-hint",
                        "Leave empty to use OLLAMA_URL / OLLAMA_MODEL from .env"
                    }
                    div { class: "settings-field",
                        label { "Ollama URL" }
                        input {
                            class: "settings-input",
                            r#type: "text",
                            placeholder: "Uses OLLAMA_URL from .env",
                            value: "{settings_url}",
                            oninput: move |e| settings_url.set(e.value()),
                        }
                    }
                    div { class: "settings-field",
                        label { "Model" }
                        input {
                            class: "settings-input",
                            r#type: "text",
                            placeholder: "Uses OLLAMA_MODEL from .env",
                            value: "{settings_model}",
                            oninput: move |e| settings_model.set(e.value()),
                        }
                    }
                    button {
                        class: "btn btn-primary",
                        onclick: move |_| {
                            // Persist trimmed drafts; empty string means
                            // "use the .env defaults".
                            *ollama_url.write() = settings_url.read().trim().to_string();
                            *ollama_model.write() = settings_model.read().trim().to_string();
                            show_settings.set(false);
                        },
                        "Save"
                    }
                }
            }
            // Loading / error state
            if is_loading {
                div { class: "dashboard-loading", "Searching..." }
            }
            if let Some(ref err) = search_error {
                div { class: "settings-hint", "{err}" }
            }
            // Main content area: grid + optional detail panel
            div { class: "{container_class}",
                // Left: news grid (compact when the detail panel is open)
                div { class: if has_selection { "dashboard-left" } else { "dashboard-full-grid" },
                    div { class: if has_selection { "news-grid news-grid--compact" } else { "news-grid" },
                        for card in news_cards.iter() {
                            {
                                let is_selected = selected_card
                                    .read()
                                    .as_ref()
                                    .is_some_and(|s| s.url == card.url && s.title == card.title);
                                rsx! {
                                    NewsCardView {
                                        key: "{card.title}-{card.url}",
                                        card: card.clone(),
                                        selected: is_selected,
                                        // Selecting a card resets the chat /
                                        // summary state and auto-summarizes
                                        // the article; the snippet + summary
                                        // are stored as follow-up chat context.
                                        on_click: move |c: NewsCard| {
                                            let snippet = c.content.clone();
                                            let article_url = c.url.clone();
                                            selected_card.set(Some(c));
                                            summary.set(None);
                                            chat_messages.set(Vec::new());
                                            article_context.set(String::new());
                                            news_session_id.set(None);
                                            // Clone settings before moving
                                            // into the async task.
                                            let oll_url = ollama_url.read().clone();
                                            let mdl = ollama_model.read().clone();
                                            spawn(async move {
                                                is_summarizing.set(true);
                                                match crate::infrastructure::llm::summarize_article(
                                                    snippet.clone(),
                                                    article_url,
                                                    oll_url,
                                                    mdl,
                                                )
                                                .await
                                                {
                                                    Ok(text) => {
                                                        article_context
                                                            .set(
                                                                format!(
                                                                    "Article content:\n{snippet}\n\n\
                                                                    AI Summary:\n{text}",
                                                                ),
                                                            );
                                                        summary.set(Some(text));
                                                    }
                                                    Err(e) => {
                                                        // Surface the failure in the
                                                        // summary slot rather than
                                                        // leaving it blank.
                                                        tracing::error!("Summarization failed: {e}");
                                                        summary.set(Some(format!("Summarization failed: {e}")));
                                                    }
                                                }
                                                is_summarizing.set(false);
                                            });
                                        },
                                    }
                                }
                            }
                        }
                    }
                }
                // Right: article detail panel (when card selected)
                if let Some(ref card) = *selected_card.read() {
                    div { class: "dashboard-right",
                        ArticleDetail {
                            card: card.clone(),
                            on_close: move |_| {
                                selected_card.set(None);
                                summary.set(None);
                                chat_messages.set(Vec::new());
                                news_session_id.set(None);
                            },
                            summary: summary.read().clone(),
                            is_summarizing: *is_summarizing.read(),
                            chat_messages: chat_messages.read().clone(),
                            is_chatting: *is_chatting.read(),
                            // Follow-up question flow: append the user message
                            // locally, send the full history to Ollama, and
                            // persist everything to a MongoDB "News" session.
                            on_chat_send: move |question: String| {
                                let oll_url = ollama_url.read().clone();
                                let mdl = ollama_model.read().clone();
                                let ctx = article_context.read().clone();
                                // Capture article info for News session creation
                                let card_title = selected_card
                                    .read()
                                    .as_ref()
                                    .map(|c| c.title.clone())
                                    .unwrap_or_default();
                                let card_url = selected_card
                                    .read()
                                    .as_ref()
                                    .map(|c| c.url.clone())
                                    .unwrap_or_default();
                                // Append user message to local chat
                                chat_messages.write().push(FollowUpMessage {
                                    role: "user".into(),
                                    content: question.clone(),
                                });
                                // Build full message history for Ollama:
                                // system context first, then the whole chat.
                                let system_msg = format!(
                                    "You are a helpful assistant. The user is reading \
                                    a news article. Use the following context to answer \
                                    their questions. Do NOT comment on the source, \
                                    dates, URLs, or formatting.\n\n{ctx}",
                                );
                                let msgs = {
                                    let history = chat_messages.read();
                                    let mut all = vec![FollowUpMessage {
                                        role: "system".into(),
                                        content: system_msg.clone(),
                                    }];
                                    all.extend(history.iter().cloned());
                                    all
                                };
                                spawn(async move {
                                    is_chatting.set(true);
                                    // Create News session on first follow-up message
                                    let existing_sid = news_session_id.read().clone();
                                    let sid = if let Some(id) = existing_sid {
                                        id
                                    } else {
                                        match create_chat_session(
                                            card_title,
                                            "News".to_string(),
                                            "ollama".to_string(),
                                            mdl.clone(),
                                            card_url,
                                        )
                                        .await
                                        {
                                            Ok(session) => {
                                                let id = session.id.clone();
                                                news_session_id.set(Some(id.clone()));
                                                // Persist system context as first message
                                                let _ = save_chat_message(
                                                    id.clone(),
                                                    "system".to_string(),
                                                    system_msg,
                                                )
                                                .await;
                                                id
                                            }
                                            Err(e) => {
                                                // Best-effort persistence: an
                                                // empty sid skips DB writes but
                                                // the chat still works locally.
                                                tracing::error!("Failed to create News session: {e}");
                                                String::new()
                                            }
                                        }
                                    };
                                    // Persist user message
                                    if !sid.is_empty() {
                                        let _ = save_chat_message(
                                            sid.clone(),
                                            "user".to_string(),
                                            question,
                                        )
                                        .await;
                                    }
                                    match crate::infrastructure::llm::chat_followup(
                                        msgs, oll_url, mdl,
                                    )
                                    .await
                                    {
                                        Ok(reply) => {
                                            // Persist assistant message
                                            if !sid.is_empty() {
                                                let _ = save_chat_message(
                                                    sid,
                                                    "assistant".to_string(),
                                                    reply.clone(),
                                                )
                                                .await;
                                            }
                                            chat_messages.write().push(FollowUpMessage {
                                                role: "assistant".into(),
                                                content: reply,
                                            });
                                        }
                                        Err(e) => {
                                            // Show the error inline as an
                                            // assistant message.
                                            tracing::error!("Chat failed: {e}");
                                            chat_messages.write().push(FollowUpMessage {
                                                role: "assistant".into(),
                                                content: format!("Error: {e}"),
                                            });
                                        }
                                    }
                                    is_chatting.set(false);
                                });
                            },
                        }
                    }
                }
                // Right: sidebar (when no card selected)
                if !has_selection {
                    DashboardSidebar {
                        ollama_url: ollama_url.read().clone(),
                        trending: trending_topics.clone(),
                        recent_searches: recent_searches.read().clone(),
                        on_topic_click: move |topic: String| {
                            record_search(&topic);
                            active_topic.set(topic);
                            selected_card.set(None);
                            summary.set(None);
                        },
                    }
                }
            }
        }
    }
}