feat(dashboard): add sidebar with Ollama status, trending topics, and article detail panel
Some checks failed
CI / Format (push) Failing after 6m19s
CI / Clippy (push) Successful in 2m23s
CI / Security Audit (push) Successful in 1m46s
CI / Tests (push) Has been skipped
CI / Format (pull_request) Failing after 6m24s
CI / Clippy (pull_request) Successful in 2m25s
CI / Security Audit (pull_request) Successful in 1m38s
CI / Deploy (push) Has been skipped
CI / Tests (pull_request) Has been skipped
CI / Deploy (pull_request) Has been skipped

Integrate SearXNG news search, Ollama-powered article summarization with
follow-up chat, and a dashboard sidebar showing LLM status, trending
keywords, and recent search history. Sidebar yields to a split-view
article detail panel when a card is selected.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sharang Parnerkar
2026-02-19 18:48:26 +01:00
parent 83772cc256
commit ba5e4b9a5d
16 changed files with 2949 additions and 115 deletions

View File

@@ -7,3 +7,10 @@ KEYCLOAK_CLIENT_ID=certifai-dashboard
APP_URL=http://localhost:8000
REDIRECT_URI=http://localhost:8000/auth/callback
ALLOWED_ORIGINS=http://localhost:8000
# SearXNG meta-search engine
SEARXNG_URL=http://localhost:8888
# Ollama LLM instance (used for article summarization and chat)
OLLAMA_URL=http://mac-mini-von-benjamin-2:11434
OLLAMA_MODEL=qwen3:30b-a3b

270
Cargo.lock generated
View File

@@ -698,6 +698,29 @@ dependencies = [
"typenum",
]
[[package]]
name = "cssparser"
version = "0.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c66d1cd8ed61bf80b38432613a7a2f09401ab8d0501110655f8b341484a3e3"
dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf",
"smallvec",
]
[[package]]
name = "cssparser-macros"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
dependencies = [
"quote",
"syn 2.0.116",
]
[[package]]
name = "darling"
version = "0.21.3"
@@ -753,6 +776,7 @@ dependencies = [
"petname",
"rand 0.10.0",
"reqwest 0.13.2",
"scraper",
"secrecy",
"serde",
"serde_json",
@@ -819,6 +843,17 @@ dependencies = [
"syn 2.0.116",
]
[[package]]
name = "derive_more"
version = "0.99.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.116",
]
[[package]]
name = "derive_more"
version = "2.1.1"
@@ -1050,7 +1085,7 @@ dependencies = [
"const-str",
"const_format",
"content_disposition",
"derive_more",
"derive_more 2.1.1",
"dioxus-asset-resolver",
"dioxus-cli-config",
"dioxus-core",
@@ -1546,12 +1581,33 @@ version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dtoa"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c3cf4824e2d5f025c7b531afcb2325364084a16806f6d47fbc1f5fbd9960590"
[[package]]
name = "dtoa-short"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87"
dependencies = [
"dtoa",
]
[[package]]
name = "dunce"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
[[package]]
name = "ego-tree"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2972feb8dffe7bc8c5463b1dacda1b0dfbed3710e50f977d965429692d74cd8"
[[package]]
name = "either"
version = "1.15.0"
@@ -1674,6 +1730,16 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "futf"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843"
dependencies = [
"mac",
"new_debug_unreachable",
]
[[package]]
name = "futures"
version = "0.3.32"
@@ -1777,6 +1843,15 @@ dependencies = [
"slab",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "generational-box"
version = "0.7.3"
@@ -2015,6 +2090,18 @@ dependencies = [
"digest",
]
[[package]]
name = "html5ever"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c"
dependencies = [
"log",
"mac",
"markup5ever",
"match_token",
]
[[package]]
name = "http"
version = "0.2.12"
@@ -2579,6 +2666,12 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "mac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "macro-string"
version = "0.1.4"
@@ -2678,6 +2771,31 @@ dependencies = [
"syn 2.0.116",
]
[[package]]
name = "markup5ever"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18"
dependencies = [
"log",
"phf",
"phf_codegen",
"string_cache",
"string_cache_codegen",
"tendril",
]
[[package]]
name = "match_token"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.116",
]
[[package]]
name = "matchers"
version = "0.2.0"
@@ -2804,7 +2922,7 @@ dependencies = [
"bitflags",
"bson",
"derive-where",
"derive_more",
"derive_more 2.1.1",
"futures-core",
"futures-io",
"futures-util",
@@ -2897,6 +3015,12 @@ dependencies = [
"jni-sys",
]
[[package]]
name = "new_debug_unreachable"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]]
name = "num-conv"
version = "0.2.0"
@@ -3003,6 +3127,58 @@ dependencies = [
"rand 0.8.5",
]
[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_macros",
"phf_shared",
]
[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared",
]
[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
"rand 0.8.5",
]
[[package]]
name = "phf_macros"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
"syn 2.0.116",
]
[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project"
version = "1.1.10"
@@ -3059,6 +3235,12 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "prettyplease"
version = "0.2.37"
@@ -3630,6 +3812,20 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scraper"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc3d051b884f40e309de6c149734eab57aa8cc1347992710dc80bcc1c2194c15"
dependencies = [
"cssparser",
"ego-tree",
"html5ever",
"precomputed-hash",
"selectors",
"tendril",
]
[[package]]
name = "sct"
version = "0.7.1"
@@ -3672,6 +3868,25 @@ dependencies = [
"libc",
]
[[package]]
name = "selectors"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd568a4c9bb598e291a08244a5c1f5a8a6650bee243b5b0f8dbb3d9cc1d87fe8"
dependencies = [
"bitflags",
"cssparser",
"derive_more 0.99.20",
"fxhash",
"log",
"new_debug_unreachable",
"phf",
"phf_codegen",
"precomputed-hash",
"servo_arc",
"smallvec",
]
[[package]]
name = "semver"
version = "1.0.27"
@@ -3841,6 +4056,15 @@ dependencies = [
"syn 2.0.116",
]
[[package]]
name = "servo_arc"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "170fb83ab34de17dc69aa7c67482b22218ddb85da56546f9bd6b929e32a05930"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "sha1"
version = "0.10.6"
@@ -3888,6 +4112,12 @@ dependencies = [
"libc",
]
[[package]]
name = "siphasher"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"
[[package]]
name = "slab"
version = "0.4.12"
@@ -3991,6 +4221,31 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
[[package]]
name = "string_cache"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f"
dependencies = [
"new_debug_unreachable",
"parking_lot",
"phf_shared",
"precomputed-hash",
"serde",
]
[[package]]
name = "string_cache_codegen"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
]
[[package]]
name = "stringprep"
version = "0.1.5"
@@ -4117,6 +4372,17 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tendril"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
dependencies = [
"futf",
"mac",
"utf-8",
]
[[package]]
name = "thiserror"
version = "1.0.69"

View File

@@ -75,6 +75,7 @@ dioxus-free-icons = { version = "0.10", features = [
] }
sha2 = { version = "0.10.9", default-features = false, optional = true }
base64 = { version = "0.22.1", default-features = false, optional = true }
scraper = { version = "0.22", default-features = false, optional = true }
[features]
# default = ["web"]
@@ -91,6 +92,7 @@ server = [
"dep:url",
"dep:sha2",
"dep:base64",
"dep:scraper",
]
[[bin]]

View File

@@ -21,12 +21,32 @@ The SaaS application dashboard is the landing page for the company admin to view
- Request support: Request support or new features using the feedback form
- GenAI: View currently running LLMs, Agents, MCP Servers. Modify or add more resources, switch to a different model, launch tools like Langchain + Langfuse for creating new agents, Tavily for internet search, or more complex tools for use with GenAI. View endpoints and generate API Keys for integrations in other applications.
## Development environment
## Dashboard
This project is written in dioxus with fullstack and router features. MongoDB is used as a database for maintaining user state. Keycloak is used as identity provider for user management.
The main dashboard provides a news feed powered by SearXNG and Ollama:
- **Topic-based search**: Browse AI, Technology, Science, Finance and custom topics. Add or remove topics on the fly; selections persist in localStorage.
- **Article detail + AI summary**: Click any card to open a split-view panel. The full article is fetched, summarized by Ollama, and a follow-up chat lets you ask questions.
- **Sidebar** (visible when no article is selected):
- **Ollama Status** -- green/red indicator with the list of loaded models.
- **Trending** -- keywords extracted from recent news headlines via SearXNG.
- **Recent Searches** -- last 10 topics you searched, persisted in localStorage.
## Code structure
## Development environment
This project is written in Dioxus 0.7 with fullstack and router features. MongoDB is used as a database for maintaining user state. Keycloak is used as identity provider for user management.
### External services
| Service | Purpose | Default URL |
|----------|--------------------------------|----------------------------|
| Keycloak | Identity provider / SSO | `http://localhost:8080` |
| SearXNG | Meta-search engine for news | `http://localhost:8888` |
| Ollama | Local LLM for summarization | `http://localhost:11434` |
Copy `.env.example` to `.env` and adjust the URLs and model name to match your setup.
## Code structure
The following folder structure is maintained for separation of concerns:
- src/components/*.rs : All components that are required to be rendered are placed here. These are frontend only, reusable components that are specific for the application.
- src/infrastructure/*.rs : All backend related functions from the dioxus fullstack are placed here. This entire module is behind the feature "server".

View File

@@ -995,6 +995,8 @@ h1, h2, h3, h4, h5, h6 {
border-radius: 12px;
overflow: hidden;
transition: border-color 0.2s ease, transform 0.2s ease;
display: flex;
flex-direction: column;
}
.news-card:hover {
@@ -1002,14 +1004,26 @@ h1, h2, h3, h4, h5, h6 {
transform: translateY(-2px);
}
.news-card-thumb {
overflow: hidden;
height: 140px;
flex-shrink: 0;
}
.news-card-thumb img {
width: 100%;
height: 140px;
height: 100%;
object-fit: cover;
/* Hide alt text on broken images */
color: transparent;
font-size: 0;
}
.news-card-body {
padding: 20px;
flex: 1;
display: flex;
flex-direction: column;
}
.news-card-meta {
@@ -1027,13 +1041,18 @@ h1, h2, h3, h4, h5, h6 {
border-radius: 4px;
text-transform: uppercase;
letter-spacing: 0.05em;
/* Default badge color for any topic */
background-color: rgba(99, 132, 210, 0.15);
color: #91a4d2;
}
.news-badge--llm { background-color: rgba(99, 102, 241, 0.15); color: #818cf8; }
.news-badge--agents { background-color: rgba(168, 85, 247, 0.15); color: #c084fc; }
.news-badge--privacy { background-color: rgba(34, 197, 94, 0.15); color: #4ade80; }
.news-badge--infrastructure { background-color: rgba(234, 179, 8, 0.15); color: #facc15; }
.news-badge--open-source { background-color: rgba(236, 72, 153, 0.15); color: #f472b6; }
/* Topic-specific badge colors */
.news-badge--ai { background-color: rgba(99, 102, 241, 0.15); color: #818cf8; }
.news-badge--technology { background-color: rgba(168, 85, 247, 0.15); color: #c084fc; }
.news-badge--science { background-color: rgba(34, 197, 94, 0.15); color: #4ade80; }
.news-badge--finance { background-color: rgba(234, 179, 8, 0.15); color: #facc15; }
.news-badge--writing { background-color: rgba(236, 72, 153, 0.15); color: #f472b6; }
.news-badge--research { background-color: rgba(6, 182, 212, 0.15); color: #22d3ee; }
.news-card-source {
font-size: 12px;
@@ -1069,6 +1088,477 @@ h1, h2, h3, h4, h5, h6 {
margin: 0;
}
.news-card--selected {
border-color: #91a4d2;
background-color: rgba(145, 164, 210, 0.08);
}
.news-card--no-thumb {
min-height: 200px;
}
.news-card--no-thumb .news-card-body {
text-align: center;
justify-content: center;
}
.news-card--no-thumb .news-card-meta {
justify-content: center;
}
.news-card {
cursor: pointer;
}
/* ===== Dashboard Split View ===== */
.dashboard-split {
display: flex;
gap: 24px;
min-height: 60vh;
}
.dashboard-full {
display: block;
}
/* ===== Dashboard With Sidebar ===== */
.dashboard-with-sidebar {
display: flex;
gap: 24px;
min-height: 60vh;
}
.dashboard-full-grid {
width: 100%;
}
.dashboard-left {
width: 40%;
min-width: 0;
overflow-y: auto;
max-height: 80vh;
}
.dashboard-right {
width: 60%;
min-width: 0;
position: sticky;
top: 24px;
align-self: flex-start;
max-height: 80vh;
overflow-y: auto;
}
.news-grid--compact {
display: grid;
grid-template-columns: 1fr;
gap: 16px;
}
.dashboard-loading {
text-align: center;
padding: 24px;
color: #8892a8;
font-size: 14px;
}
/* ===== Topic Tabs ===== */
.topic-tab-wrapper {
display: inline-flex;
align-items: center;
gap: 2px;
}
.topic-remove {
background: none;
border: none;
color: #5a6478;
font-size: 12px;
cursor: pointer;
padding: 2px 4px;
border-radius: 4px;
transition: color 0.15s ease;
}
.topic-remove:hover {
color: #f87171;
}
.topic-add-btn {
padding: 6px 14px;
border-radius: 20px;
border: 1px dashed #2a2f3d;
background-color: transparent;
color: #5a6478;
font-size: 16px;
cursor: pointer;
transition: all 0.15s ease;
font-family: 'Inter', sans-serif;
line-height: 1;
}
.topic-add-btn:hover {
border-color: #91a4d2;
color: #91a4d2;
}
.topic-input-wrapper {
display: inline-flex;
align-items: center;
gap: 8px;
}
.topic-input {
padding: 5px 12px;
border-radius: 20px;
border: 1px solid #2a2f3d;
background-color: #1a1d26;
color: #e2e8f0;
font-size: 13px;
font-family: 'Inter', sans-serif;
outline: none;
width: 140px;
}
.topic-input:focus {
border-color: #91a4d2;
}
.topic-cancel-btn {
background: none;
border: none;
color: #5a6478;
font-size: 12px;
cursor: pointer;
}
.topic-cancel-btn:hover {
color: #e2e8f0;
}
/* ===== Settings Panel ===== */
.settings-toggle {
margin-left: auto;
}
.settings-panel {
background-color: #1a1d26;
border: 1px solid #2a2f3d;
border-radius: 12px;
padding: 20px;
margin-bottom: 24px;
}
.settings-panel-title {
font-size: 15px;
font-weight: 600;
color: #e2e8f0;
margin: 0 0 16px;
}
.settings-field {
margin-bottom: 12px;
}
.settings-field label {
display: block;
font-size: 12px;
color: #8892a8;
margin-bottom: 4px;
font-weight: 500;
}
.settings-input {
width: 100%;
max-width: 400px;
padding: 8px 12px;
border-radius: 8px;
border: 1px solid #2a2f3d;
background-color: #0f1116;
color: #e2e8f0;
font-size: 13px;
font-family: 'Inter', sans-serif;
outline: none;
}
.settings-input:focus {
border-color: #91a4d2;
}
.settings-hint {
background-color: rgba(234, 179, 8, 0.1);
border: 1px solid rgba(234, 179, 8, 0.3);
border-radius: 8px;
padding: 12px 16px;
margin-bottom: 16px;
font-size: 13px;
color: #facc15;
}
/* ===== Article Detail Panel ===== */
.article-detail-panel {
background-color: #1a1d26;
border: 1px solid #2a2f3d;
border-radius: 12px;
padding: 24px;
position: relative;
}
.article-detail-close {
position: absolute;
top: 16px;
right: 16px;
background: none;
border: 1px solid #2a2f3d;
color: #8892a8;
width: 32px;
height: 32px;
border-radius: 8px;
cursor: pointer;
font-size: 14px;
font-weight: 600;
display: flex;
align-items: center;
justify-content: center;
transition: all 0.15s ease;
}
.article-detail-close:hover {
border-color: #f87171;
color: #f87171;
}
.article-detail-content {
padding-right: 40px;
}
.article-detail-title {
font-size: 22px;
font-weight: 700;
color: #f1f5f9;
margin: 0 0 12px;
line-height: 1.3;
}
.article-detail-meta {
display: flex;
align-items: center;
gap: 10px;
margin-bottom: 20px;
flex-wrap: wrap;
}
.article-detail-source {
font-size: 13px;
color: #8892a8;
display: inline-flex;
align-items: center;
gap: 6px;
}
.source-favicon {
width: 16px;
height: 16px;
border-radius: 2px;
flex-shrink: 0;
}
.article-detail-date {
font-size: 13px;
color: #5a6478;
}
.article-detail-body {
margin-bottom: 20px;
}
.article-detail-body p {
font-size: 14px;
line-height: 1.7;
color: #c8d0e0;
margin: 0;
}
.article-detail-link {
display: inline-block;
font-size: 13px;
color: #91a4d2;
text-decoration: none;
margin-bottom: 20px;
transition: color 0.15s ease;
}
.article-detail-link:hover {
color: #b4c4e8;
}
/* ---- AI Summary Bubble ---- */
.ai-summary-bubble {
margin-top: 20px;
background-color: rgba(145, 164, 210, 0.08);
border: 1px solid rgba(145, 164, 210, 0.18);
border-radius: 12px;
padding: 16px 18px;
position: relative;
}
.ai-summary-bubble-text {
font-size: 14px;
line-height: 1.65;
color: #c8d0e0;
margin: 0;
white-space: pre-wrap;
}
.ai-summary-bubble-label {
display: block;
font-size: 11px;
font-weight: 600;
color: #91a4d2;
margin-top: 12px;
text-transform: uppercase;
letter-spacing: 0.05em;
opacity: 0.6;
}
.ai-summary-bubble-loading {
display: flex;
align-items: center;
gap: 10px;
font-size: 14px;
color: #91a4d2;
font-style: italic;
}
/* Pulsing dots animation for loading states */
.ai-summary-dot-pulse {
display: flex;
gap: 4px;
}
.ai-summary-dot-pulse::before,
.ai-summary-dot-pulse::after,
.ai-summary-dot-pulse {
position: relative;
}
.ai-summary-dot-pulse::before,
.ai-summary-dot-pulse::after {
content: "";
display: inline-block;
width: 6px;
height: 6px;
border-radius: 50%;
background: #91a4d2;
animation: dotPulse 1.2s infinite ease-in-out;
}
.ai-summary-dot-pulse::after {
animation-delay: 0.4s;
}
@keyframes dotPulse {
0%, 80%, 100% { opacity: 0.3; transform: scale(0.8); }
40% { opacity: 1; transform: scale(1); }
}
/* ---- Follow-up Chat ---- */
.article-chat {
margin-top: 16px;
border-top: 1px solid #2a2f3d;
padding-top: 16px;
}
.article-chat-messages {
max-height: 300px;
overflow-y: auto;
display: flex;
flex-direction: column;
gap: 10px;
margin-bottom: 12px;
padding-right: 4px;
}
.chat-msg {
max-width: 85%;
padding: 10px 14px;
border-radius: 12px;
font-size: 14px;
line-height: 1.55;
}
.chat-msg p {
margin: 0;
white-space: pre-wrap;
}
.chat-msg--user {
align-self: flex-end;
background: rgba(99, 132, 210, 0.2);
border: 1px solid rgba(99, 132, 210, 0.3);
color: #d0d8ee;
border-bottom-right-radius: 4px;
}
.chat-msg--assistant {
align-self: flex-start;
background: rgba(145, 164, 210, 0.08);
border: 1px solid rgba(145, 164, 210, 0.15);
color: #c8d0e0;
border-bottom-left-radius: 4px;
}
.chat-msg--typing {
padding: 12px 16px;
}
.article-chat-input {
display: flex;
gap: 8px;
}
.article-chat-textbox {
flex: 1;
background: #1a1e2e;
border: 1px solid #2a2f3d;
border-radius: 8px;
padding: 10px 14px;
font-size: 14px;
color: #c8d0e0;
outline: none;
transition: border-color 0.2s;
}
.article-chat-textbox:focus {
border-color: #6384d2;
}
.article-chat-textbox:disabled {
opacity: 0.5;
}
.article-chat-send {
background: #6384d2;
color: #fff;
border: none;
border-radius: 8px;
padding: 10px 18px;
font-size: 14px;
font-weight: 500;
cursor: pointer;
transition: background 0.2s;
white-space: nowrap;
}
.article-chat-send:hover:not(:disabled) {
background: #5270b8;
}
.article-chat-send:disabled {
opacity: 0.5;
cursor: not-allowed;
}
/* ===== Providers Page ===== */
.providers-page {
max-width: 960px;
@@ -1790,6 +2280,104 @@ h1, h2, h3, h4, h5, h6 {
margin-top: 24px;
}
/* ===== Dashboard Sidebar ===== */
.dashboard-sidebar {
width: 30%;
min-width: 240px;
max-width: 320px;
position: sticky;
top: 24px;
align-self: flex-start;
max-height: 80vh;
overflow-y: auto;
border-left: 1px solid #1e222d;
padding-left: 24px;
display: flex;
flex-direction: column;
gap: 24px;
}
.sidebar-section {
display: flex;
flex-direction: column;
gap: 10px;
}
.sidebar-section-title {
font-size: 12px;
font-weight: 600;
color: #5a6478;
text-transform: uppercase;
letter-spacing: 0.05em;
margin: 0;
}
.sidebar-status-row {
display: flex;
align-items: center;
gap: 8px;
}
.sidebar-status-dot {
width: 8px;
height: 8px;
border-radius: 50%;
flex-shrink: 0;
}
.sidebar-status-dot--online {
background-color: #4ade80;
box-shadow: 0 0 6px rgba(74, 222, 128, 0.4);
}
.sidebar-status-dot--offline {
background-color: #f87171;
box-shadow: 0 0 6px rgba(248, 113, 113, 0.4);
}
.sidebar-status-label {
font-size: 13px;
color: #e2e8f0;
font-weight: 500;
}
.sidebar-model-list {
display: flex;
flex-wrap: wrap;
gap: 6px;
}
.sidebar-model-tag {
display: inline-block;
font-size: 11px;
font-weight: 500;
padding: 3px 10px;
border-radius: 12px;
background-color: rgba(145, 164, 210, 0.1);
color: #91a4d2;
border: 1px solid rgba(145, 164, 210, 0.2);
}
.sidebar-topic-link {
display: block;
width: 100%;
text-align: left;
background: none;
border: none;
padding: 6px 10px;
border-radius: 6px;
font-size: 13px;
font-family: 'Inter', sans-serif;
color: #8892a8;
cursor: pointer;
transition: background-color 0.15s ease, color 0.15s ease;
}
.sidebar-topic-link:hover {
background-color: rgba(145, 164, 210, 0.08);
color: #e2e8f0;
}
/* ===== Responsive: Dashboard Pages ===== */
@media (max-width: 1024px) {
.news-grid,
@@ -1809,6 +2397,34 @@ h1, h2, h3, h4, h5, h6 {
.org-stats-bar {
flex-wrap: wrap;
}
.dashboard-sidebar {
display: none;
}
.dashboard-with-sidebar {
display: block;
}
.dashboard-split {
flex-direction: column;
}
.dashboard-left {
width: 100%;
max-height: none;
overflow-y: visible;
}
.dashboard-right {
width: 100%;
position: static;
max-height: none;
}
.news-grid--compact {
grid-template-columns: repeat(2, 1fr);
}
}
@media (max-width: 768px) {
@@ -1818,6 +2434,10 @@ h1, h2, h3, h4, h5, h6 {
grid-template-columns: 1fr;
}
.news-grid--compact {
grid-template-columns: 1fr;
}
.chat-page {
flex-direction: column;
height: auto;

View File

@@ -162,6 +162,147 @@
}
}
@layer utilities {
.modal {
@layer daisyui.l1.l2.l3 {
pointer-events: none;
visibility: hidden;
position: fixed;
inset: calc(0.25rem * 0);
margin: calc(0.25rem * 0);
display: grid;
height: 100%;
max-height: none;
width: 100%;
max-width: none;
align-items: center;
justify-items: center;
background-color: transparent;
padding: calc(0.25rem * 0);
color: inherit;
transition: visibility 0.3s allow-discrete, background-color 0.3s ease-out, opacity 0.1s ease-out;
overflow: clip;
overscroll-behavior: contain;
z-index: 999;
scrollbar-gutter: auto;
&::backdrop {
display: none;
}
}
@layer daisyui.l1.l2 {
&.modal-open, &[open], &:target, .modal-toggle:checked + & {
pointer-events: auto;
visibility: visible;
opacity: 100%;
transition: visibility 0s allow-discrete, background-color 0.3s ease-out, opacity 0.1s ease-out;
background-color: oklch(0% 0 0/ 0.4);
.modal-box {
translate: 0 0;
scale: 1;
opacity: 1;
}
:root:has(&) {
--page-has-backdrop: 1;
--page-overflow: hidden;
--page-scroll-bg: var(--page-scroll-bg-on);
--page-scroll-gutter: stable;
--page-scroll-transition: var(--page-scroll-transition-on);
animation: set-page-has-scroll forwards;
animation-timeline: scroll();
}
}
@starting-style {
&.modal-open, &[open], &:target, .modal-toggle:checked + & {
opacity: 0%;
}
}
}
}
.tab {
@layer daisyui.l1.l2.l3 {
position: relative;
display: inline-flex;
cursor: pointer;
appearance: none;
flex-wrap: wrap;
align-items: center;
justify-content: center;
text-align: center;
webkit-user-select: none;
user-select: none;
&:hover {
@media (hover: hover) {
color: var(--color-base-content);
}
}
--tab-p: 0.75rem;
--tab-bg: var(--color-base-100);
--tab-border-color: var(--color-base-300);
--tab-radius-ss: 0;
--tab-radius-se: 0;
--tab-radius-es: 0;
--tab-radius-ee: 0;
--tab-order: 0;
--tab-radius-min: calc(0.75rem - var(--border));
--tab-radius-limit: min(var(--radius-field), var(--tab-radius-min));
--tab-radius-grad: #0000 calc(69% - var(--border)),
var(--tab-border-color) calc(69% - var(--border) + 0.25px),
var(--tab-border-color) 69%,
var(--tab-bg) calc(69% + 0.25px);
border-color: #0000;
order: var(--tab-order);
height: var(--tab-height);
font-size: 0.875rem;
padding-inline: var(--tab-p);
&:is(input[type="radio"]) {
min-width: fit-content;
&:after {
--tw-content: attr(aria-label);
content: var(--tw-content);
}
}
&:is(label) {
position: relative;
input {
position: absolute;
inset: calc(0.25rem * 0);
cursor: pointer;
appearance: none;
opacity: 0%;
}
}
&:checked, &:is(label:has(:checked)), &:is(.tab-active, [aria-selected="true"], [aria-current="true"], [aria-current="page"]) {
& + .tab-content {
display: block;
}
}
&:not( :checked, label:has(:checked), :hover, .tab-active, [aria-selected="true"], [aria-current="true"], [aria-current="page"] ) {
color: var(--color-base-content);
@supports (color: color-mix(in lab, red, red)) {
color: color-mix(in oklab, var(--color-base-content) 50%, transparent);
}
}
&:not(input):empty {
flex-grow: 1;
cursor: default;
}
&:focus {
--tw-outline-style: none;
outline-style: none;
@media (forced-colors: active) {
outline: 2px solid transparent;
outline-offset: 2px;
}
}
&:focus-visible, &:is(label:has(:checked:focus-visible)) {
outline: 2px solid currentColor;
outline-offset: -5px;
}
&[disabled] {
pointer-events: none;
opacity: 40%;
}
}
}
.btn {
:where(&) {
@layer daisyui.l1.l2.l3 {
@@ -314,6 +455,65 @@
.visible {
visibility: visible;
}
.list {
@layer daisyui.l1.l2.l3 {
display: flex;
flex-direction: column;
font-size: 0.875rem;
.list-row {
--list-grid-cols: minmax(0, auto) 1fr;
position: relative;
display: grid;
grid-auto-flow: column;
gap: calc(0.25rem * 4);
border-radius: var(--radius-box);
padding: calc(0.25rem * 4);
word-break: break-word;
grid-template-columns: var(--list-grid-cols);
}
& > :not(:last-child) {
&.list-row, .list-row {
&:after {
content: "";
border-bottom: var(--border) solid;
inset-inline: var(--radius-box);
position: absolute;
bottom: calc(0.25rem * 0);
border-color: var(--color-base-content);
@supports (color: color-mix(in lab, red, red)) {
border-color: color-mix(in oklab, var(--color-base-content) 5%, transparent);
}
}
}
}
}
@layer daisyui.l1.l2 {
.list-row {
&:has(.list-col-grow:nth-child(1)) {
--list-grid-cols: 1fr;
}
&:has(.list-col-grow:nth-child(2)) {
--list-grid-cols: minmax(0, auto) 1fr;
}
&:has(.list-col-grow:nth-child(3)) {
--list-grid-cols: minmax(0, auto) minmax(0, auto) 1fr;
}
&:has(.list-col-grow:nth-child(4)) {
--list-grid-cols: minmax(0, auto) minmax(0, auto) minmax(0, auto) 1fr;
}
&:has(.list-col-grow:nth-child(5)) {
--list-grid-cols: minmax(0, auto) minmax(0, auto) minmax(0, auto) minmax(0, auto) 1fr;
}
&:has(.list-col-grow:nth-child(6)) {
--list-grid-cols: minmax(0, auto) minmax(0, auto) minmax(0, auto) minmax(0, auto)
minmax(0, auto) 1fr;
}
> * {
grid-row-start: 1;
}
}
}
}
.toggle {
@layer daisyui.l1.l2.l3 {
border: var(--border) solid currentColor;
@@ -589,6 +789,75 @@
}
}
}
/* daisyUI `.table` component (generated output — regenerate via the build
   rather than hand-editing). Covers the base table layout, RTL text
   alignment, opt-in row hover, sticky pinned rows/columns, and row
   separator borders with a color-mix() fallback behind @supports. */
.table {
@layer daisyui.l1.l2.l3 {
font-size: 0.875rem;
position: relative;
width: 100%;
border-collapse: separate;
--tw-border-spacing-x: calc(0.25rem * 0);
--tw-border-spacing-y: calc(0.25rem * 0);
border-spacing: var(--tw-border-spacing-x) var(--tw-border-spacing-y);
border-radius: var(--radius-box);
text-align: left;
/* Mirror alignment for right-to-left documents. */
&:where(:dir(rtl), [dir="rtl"], [dir="rtl"] *) {
text-align: right;
}
/* `.row-hover` rows highlight on hover (hover-capable pointers only). */
tr.row-hover {
&, &:nth-child(even) {
&:hover {
@media (hover: hover) {
background-color: var(--color-base-200);
}
}
}
}
:where(th, td) {
padding-inline: calc(0.25rem * 4);
padding-block: calc(0.25rem * 3);
vertical-align: middle;
}
:where(thead, tfoot) {
white-space: nowrap;
color: var(--color-base-content);
@supports (color: color-mix(in lab, red, red)) {
color: color-mix(in oklab, var(--color-base-content) 60%, transparent);
}
font-size: 0.875rem;
font-weight: 600;
}
:where(tfoot tr:first-child :is(td, th)) {
border-top: var(--border) solid var(--color-base-content);
@supports (color: color-mix(in lab, red, red)) {
border-top: var(--border) solid color-mix(in oklch, var(--color-base-content) 5%, #0000);
}
}
/* `.table-pin-rows` / `.table-pin-cols`: sticky headers, footers, and
   row-header cells with an opaque background so content scrolls under. */
:where(.table-pin-rows thead tr) {
position: sticky;
top: calc(0.25rem * 0);
z-index: 1;
background-color: var(--color-base-100);
}
:where(.table-pin-rows tfoot tr) {
position: sticky;
bottom: calc(0.25rem * 0);
z-index: 1;
background-color: var(--color-base-100);
}
:where(.table-pin-cols tr th) {
position: sticky;
right: calc(0.25rem * 0);
left: calc(0.25rem * 0);
background-color: var(--color-base-100);
}
:where(thead tr :is(td, th), tbody tr:not(:last-child) :is(td, th)) {
border-bottom: var(--border) solid var(--color-base-content);
@supports (color: color-mix(in lab, red, red)) {
border-bottom: var(--border) solid color-mix(in oklch, var(--color-base-content) 5%, #0000);
}
}
}
}
.steps {
@layer daisyui.l1.l2.l3 {
display: inline-grid;
@@ -699,6 +968,34 @@
}
}
}
/* daisyUI `.chat-bubble` (generated): the message bubble inside a `.chat`
   grid. The ::before pseudo-element draws the bubble "tail" by masking the
   bubble's own background color with the SVG in --mask-chat. */
.chat-bubble {
@layer daisyui.l1.l2.l3 {
position: relative;
display: block;
width: fit-content;
border-radius: var(--radius-field);
background-color: var(--color-base-300);
padding-inline: calc(0.25rem * 4);
padding-block: calc(0.25rem * 2);
color: var(--color-base-content);
grid-row-end: 3;
min-height: 2rem;
min-width: 2.5rem;
max-width: 90%;
&:before {
position: absolute;
bottom: calc(0.25rem * 0);
height: calc(0.25rem * 3);
width: calc(0.25rem * 3);
/* Tail inherits the bubble background so color modifiers match. */
background-color: inherit;
content: "";
mask-repeat: no-repeat;
mask-image: var(--mask-chat);
mask-position: 0px -1px;
mask-size: 0.8125rem;
}
}
}
.select {
@layer daisyui.l1.l2.l3 {
border: var(--border) solid #0000;
@@ -934,6 +1231,15 @@
}
}
}
/* daisyUI `.stats` (generated): horizontal container for stat blocks;
   overflows sideways with scrolling instead of wrapping. */
.stats {
@layer daisyui.l1.l2.l3 {
position: relative;
display: inline-grid;
grid-auto-flow: column;
overflow-x: auto;
border-radius: var(--radius-box);
}
}
.progress {
@layer daisyui.l1.l2.l3 {
position: relative;
@@ -999,6 +1305,76 @@
/* Positioning utility: pins to the inline-end edge (right in LTR). */
.end {
inset-inline-end: var(--spacing);
}
/* daisyUI `.join` (generated): groups `.join-item` children so only the
   outer corners of the group are rounded. The four --join-* custom
   properties carry the per-corner radius; first/last/only children get the
   outer radius, everything in between gets 0. */
.join {
display: inline-flex;
align-items: stretch;
--join-ss: 0;
--join-se: 0;
--join-es: 0;
--join-ee: 0;
:where(.join-item) {
border-start-start-radius: var(--join-ss, 0);
border-start-end-radius: var(--join-se, 0);
border-end-start-radius: var(--join-es, 0);
border-end-end-radius: var(--join-ee, 0);
/* Reset for elements nested inside a join-item. */
* {
--join-ss: var(--radius-field);
--join-se: var(--radius-field);
--join-es: var(--radius-field);
--join-ee: var(--radius-field);
}
}
/* Direct first child: round the leading corners only. */
> .join-item:where(:first-child) {
--join-ss: var(--radius-field);
--join-se: 0;
--join-es: var(--radius-field);
--join-ee: 0;
}
/* Same rule when the join-item sits inside a wrapper element. */
:first-child:not(:last-child) {
:where(.join-item) {
--join-ss: var(--radius-field);
--join-se: 0;
--join-es: var(--radius-field);
--join-ee: 0;
}
}
/* Direct last child: round the trailing corners only. */
> .join-item:where(:last-child) {
--join-ss: 0;
--join-se: var(--radius-field);
--join-es: 0;
--join-ee: var(--radius-field);
}
:last-child:not(:first-child) {
:where(.join-item) {
--join-ss: 0;
--join-se: var(--radius-field);
--join-es: 0;
--join-ee: var(--radius-field);
}
}
/* Sole child: round all four corners. */
> .join-item:where(:only-child) {
--join-ss: var(--radius-field);
--join-se: var(--radius-field);
--join-es: var(--radius-field);
--join-ee: var(--radius-field);
}
:only-child {
:where(.join-item) {
--join-ss: var(--radius-field);
--join-se: var(--radius-field);
--join-es: var(--radius-field);
--join-ee: var(--radius-field);
}
}
/* Lift the focused item above siblings so its outline isn't clipped. */
> :where(:focus, :has(:focus)) {
z-index: 1;
}
@media (hover: hover) {
> :where(.btn:hover, :has(.btn:hover)) {
isolation: isolate;
}
}
}
.hero-content {
@layer daisyui.l1.l2.l3 {
isolation: isolate;
@@ -1122,6 +1498,51 @@
max-width: 96rem;
}
}
/* daisyUI `.filter` COMPONENT (generated) — radio-button filter group; not
   to be confused with the single-line Tailwind `.filter` utility that sets
   the CSS `filter` property elsewhere in this file. Unchecked options
   collapse away (scale/width/opacity to zero) once a choice is made, and a
   `.filter-reset` button appears to clear the selection. */
.filter {
@layer daisyui.l1.l2.l3 {
display: flex;
flex-wrap: wrap;
input[type="radio"] {
width: auto;
}
input {
overflow: hidden;
opacity: 100%;
scale: 1;
transition: margin 0.1s, opacity 0.3s, padding 0.3s, border-width 0.1s;
&:not(:last-child) {
margin-inline-end: calc(0.25rem * 1);
}
&.filter-reset {
aspect-ratio: 1 / 1;
/* Render a multiplication sign as the reset glyph. */
&::after {
--tw-content: "×";
content: var(--tw-content);
}
}
}
/* No option selected: hide the reset control. */
&:not(:has(input:checked:not(.filter-reset))) {
.filter-reset, input[type="reset"] {
scale: 0;
border-width: 0;
margin-inline: calc(0.25rem * 0);
width: calc(0.25rem * 0);
padding-inline: calc(0.25rem * 0);
opacity: 0%;
}
}
/* An option is selected: collapse all non-selected options. */
&:has(input:checked:not(.filter-reset)) {
input:not(:checked, .filter-reset, input[type="reset"]) {
scale: 0;
border-width: 0;
margin-inline: calc(0.25rem * 0);
width: calc(0.25rem * 0);
padding-inline: calc(0.25rem * 0);
opacity: 0%;
}
}
}
}
.label {
@layer daisyui.l1.l2.l3 {
display: inline-flex;
@@ -1208,6 +1629,17 @@
padding-inline: calc(var(--size) / 2 - var(--border));
}
}
/* daisyUI `.tabs` (generated): tab strip container; height and direction
   are variable-driven so placement modifiers can override them. */
.tabs {
@layer daisyui.l1.l2.l3 {
display: flex;
flex-wrap: wrap;
--tabs-height: auto;
--tabs-direction: row;
--tab-height: calc(var(--size-field, 0.25rem) * 10);
height: var(--tabs-height);
flex-direction: var(--tabs-direction);
}
}
.footer {
@layer daisyui.l1.l2.l3 {
display: grid;
@@ -1233,6 +1665,15 @@
}
}
}
/* daisyUI `.chat` (generated): grid wrapper for one chat message (avatar,
   header, bubble, footer). --mask-chat is the inline SVG used by
   `.chat-bubble::before` to mask out the bubble tail shape. */
.chat {
@layer daisyui.l1.l2.l3 {
display: grid;
grid-auto-rows: min-content;
column-gap: calc(0.25rem * 3);
padding-block: calc(0.25rem * 1);
--mask-chat: url("data:image/svg+xml,%3csvg width='13' height='13' xmlns='http://www.w3.org/2000/svg'%3e%3cpath fill='black' d='M0 11.5004C0 13.0004 2 13.0004 2 13.0004H12H13V0.00036329L12.5 0C12.5 0 11.977 2.09572 11.8581 2.50033C11.6075 3.35237 10.9149 4.22374 9 5.50036C6 7.50036 0 10.0004 0 11.5004Z'/%3e%3c/svg%3e");
}
}
.card-title {
@layer daisyui.l1.l2.l3 {
display: flex;
@@ -1242,12 +1683,21 @@
font-weight: 600;
}
}
/* Tailwind display utilities (generated). Note: this `.table` sets only
   `display: table` and layers with the daisyUI `.table` component rule
   defined earlier in the file. */
.block {
display: block;
}
.grid {
display: grid;
}
.hidden {
display: none;
}
.inline {
display: inline;
}
.table {
display: table;
}
/* Composes the transform from individual --tw-* parts; the trailing comma
   in each var() makes an unset part expand to nothing. */
.transform {
transform: var(--tw-rotate-x,) var(--tw-rotate-y,) var(--tw-rotate-z,) var(--tw-skew-x,) var(--tw-skew-y,);
}
@@ -1311,6 +1761,9 @@
}
}
}
/* Tailwind `.filter` UTILITY (generated): composes the CSS filter function
   list from --tw-* parts. Distinct from the daisyUI `.filter` component
   (radio filter group) declared earlier; both apply to `.filter` elements. */
.filter {
filter: var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,);
}
.btn-outline {
@layer daisyui.l1 {
&:not( .btn-active, :hover, :active:focus, :focus-visible, input:checked:not(.filter .btn), :disabled, [disabled], .btn-disabled ) {
@@ -1351,6 +1804,12 @@
--btn-fg: var(--color-primary-content);
}
}
/* daisyUI `.btn-secondary` (generated): recolors the base button via the
   --btn-color/--btn-fg hooks consumed by the `.btn` rules. */
.btn-secondary {
@layer daisyui.l1.l2.l3 {
--btn-color: var(--color-secondary);
--btn-fg: var(--color-secondary-content);
}
}
}
@layer base {
:where(:root),:root:has(input.theme-controller[value=light]:checked),[data-theme=light] {
@@ -1724,6 +2183,59 @@
inherits: false;
initial-value: solid;
}
/* Registered custom properties for the Tailwind filter/drop-shadow
   utilities (generated). `syntax: "*"` + `inherits: false` gives each
   part a non-inheriting, freely-typed slot; only the drop-shadow alpha
   carries a typed percentage with an initial value. */
@property --tw-blur {
syntax: "*";
inherits: false;
}
@property --tw-brightness {
syntax: "*";
inherits: false;
}
@property --tw-contrast {
syntax: "*";
inherits: false;
}
@property --tw-grayscale {
syntax: "*";
inherits: false;
}
@property --tw-hue-rotate {
syntax: "*";
inherits: false;
}
@property --tw-invert {
syntax: "*";
inherits: false;
}
@property --tw-opacity {
syntax: "*";
inherits: false;
}
@property --tw-saturate {
syntax: "*";
inherits: false;
}
@property --tw-sepia {
syntax: "*";
inherits: false;
}
@property --tw-drop-shadow {
syntax: "*";
inherits: false;
}
@property --tw-drop-shadow-color {
syntax: "*";
inherits: false;
}
@property --tw-drop-shadow-alpha {
syntax: "<percentage>";
inherits: false;
initial-value: 100%;
}
@property --tw-drop-shadow-size {
syntax: "*";
inherits: false;
}
@layer properties {
@supports ((-webkit-hyphens: none) and (not (margin-trim: inline))) or ((-moz-orient: inline) and (not (color:rgb(from red r g b)))) {
*, ::before, ::after, ::backdrop {
@@ -1733,6 +2245,19 @@
--tw-skew-x: initial;
--tw-skew-y: initial;
--tw-outline-style: solid;
--tw-blur: initial;
--tw-brightness: initial;
--tw-contrast: initial;
--tw-grayscale: initial;
--tw-hue-rotate: initial;
--tw-invert: initial;
--tw-opacity: initial;
--tw-saturate: initial;
--tw-sepia: initial;
--tw-drop-shadow: initial;
--tw-drop-shadow-color: initial;
--tw-drop-shadow-alpha: 100%;
--tw-drop-shadow-size: initial;
}
}
}

View File

@@ -0,0 +1,158 @@
use crate::infrastructure::llm::FollowUpMessage;
use crate::models::NewsCard;
use dioxus::prelude::*;
/// Side panel displaying the full details of a selected news article.
///
/// Shows the article title, source, date, category badge, full content,
/// a link to the original article, an AI summary bubble, and a follow-up
/// chat window for asking questions about the article.
///
/// # Arguments
///
/// * `card` - The selected news card data
/// * `on_close` - Handler to close the detail panel
/// * `summary` - Optional AI-generated summary text
/// * `is_summarizing` - Whether a summarization request is in progress
/// * `chat_messages` - Follow-up chat conversation history (user + assistant turns)
/// * `is_chatting` - Whether a chat response is being generated
/// * `on_chat_send` - Handler called with the user's follow-up question
#[component]
pub fn ArticleDetail(
card: NewsCard,
on_close: EventHandler,
summary: Option<String>,
#[props(default = false)] is_summarizing: bool,
chat_messages: Vec<FollowUpMessage>,
#[props(default = false)] is_chatting: bool,
on_chat_send: EventHandler<String>,
) -> Element {
let css_suffix = card.category.to_lowercase().replace(' ', "-");
let badge_class = format!("news-badge news-badge--{css_suffix}");
let mut chat_input = use_signal(String::new);
let has_summary = summary.is_some() && !is_summarizing;
// Build favicon URL using DuckDuckGo's privacy-friendly icon service
let favicon_url = format!("https://icons.duckduckgo.com/ip3/{}.ico", card.source);
rsx! {
aside { class: "article-detail-panel",
// Close button
button {
class: "article-detail-close",
onclick: move |_| on_close.call(()),
"X"
}
div { class: "article-detail-content",
// Header
h2 { class: "article-detail-title", "{card.title}" }
div { class: "article-detail-meta",
span { class: "{badge_class}", "{card.category}" }
span { class: "article-detail-source",
img {
class: "source-favicon",
src: "{favicon_url}",
alt: "",
width: "16",
height: "16",
}
"{card.source}"
}
span { class: "article-detail-date", "{card.published_at}" }
}
// Content body
div { class: "article-detail-body",
p { "{card.content}" }
}
// Link to original
a {
class: "article-detail-link",
href: "{card.url}",
target: "_blank",
rel: "noopener",
"Read original article"
}
// AI Summary bubble (below the link)
div { class: "ai-summary-bubble",
if is_summarizing {
div { class: "ai-summary-bubble-loading",
div { class: "ai-summary-dot-pulse" }
span { "Summarizing..." }
}
} else if let Some(ref text) = summary {
p { class: "ai-summary-bubble-text", "{text}" }
span { class: "ai-summary-bubble-label", "Summarized with AI" }
}
}
// Follow-up chat window (visible after summary is ready)
if has_summary {
div { class: "article-chat",
// Chat message history
if !chat_messages.is_empty() {
div { class: "article-chat-messages",
for msg in chat_messages.iter() {
{
let bubble_class = if msg.role == "user" {
"chat-msg chat-msg--user"
} else {
"chat-msg chat-msg--assistant"
};
rsx! {
div { class: "{bubble_class}",
p { "{msg.content}" }
}
}
}
}
if is_chatting {
div { class: "chat-msg chat-msg--assistant chat-msg--typing",
div { class: "ai-summary-dot-pulse" }
}
}
}
}
// Chat input
div { class: "article-chat-input",
input {
class: "article-chat-textbox",
r#type: "text",
placeholder: "Ask a follow-up question...",
value: "{chat_input}",
disabled: is_chatting,
oninput: move |e| chat_input.set(e.value()),
onkeypress: move |e| {
if e.key() == Key::Enter && !is_chatting {
let val = chat_input.read().trim().to_string();
if !val.is_empty() {
on_chat_send.call(val);
chat_input.set(String::new());
}
}
},
}
button {
class: "article-chat-send",
disabled: is_chatting,
onclick: move |_| {
let val = chat_input.read().trim().to_string();
if !val.is_empty() {
on_chat_send.call(val);
chat_input.set(String::new());
}
},
"Send"
}
}
}
}
}
}
}
}

View File

@@ -0,0 +1,112 @@
use dioxus::prelude::*;
use crate::infrastructure::ollama::{get_ollama_status, OllamaStatus};
/// Right sidebar for the dashboard, showing Ollama status, trending topics,
/// and recent search history.
///
/// Appears when no article card is selected. Disappears when the user opens
/// the article detail split view.
///
/// # Props
///
/// * `ollama_url` - Ollama instance URL for status polling
/// * `trending` - Trending topic keywords extracted from recent news headlines
/// * `recent_searches` - Recent search topics stored in localStorage
/// * `on_topic_click` - Fires when a trending or recent topic is clicked
#[component]
pub fn DashboardSidebar(
    ollama_url: String,
    trending: Vec<String>,
    recent_searches: Vec<String>,
    on_topic_click: EventHandler<String>,
) -> Element {
    // Query the Ollama status exactly once when the component mounts.
    // `use_resource` with no signal dependencies never re-fires on parent
    // re-renders (unlike `use_effect`).
    let url = ollama_url.clone();
    let status_res = use_resource(move || {
        let target = url.clone();
        async move {
            // Any transport/server error is folded into an "offline" status.
            match get_ollama_status(target).await {
                Ok(status) => status,
                Err(_) => OllamaStatus {
                    online: false,
                    models: Vec::new(),
                },
            }
        }
    });

    // While the request is still in flight, report the instance as offline.
    let status: OllamaStatus = status_res.read().as_ref().cloned().unwrap_or(OllamaStatus {
        online: false,
        models: Vec::new(),
    });

    // Precompute the indicator class and label instead of branching in rsx.
    let (dot_class, status_label) = if status.online {
        ("sidebar-status-dot sidebar-status-dot--online", "Online")
    } else {
        ("sidebar-status-dot sidebar-status-dot--offline", "Offline")
    };

    rsx! {
        aside { class: "dashboard-sidebar",
            // -- Ollama Status Section --
            div { class: "sidebar-section",
                h4 { class: "sidebar-section-title", "Ollama Status" }
                div { class: "sidebar-status-row",
                    span { class: "{dot_class}" }
                    span { class: "sidebar-status-label", "{status_label}" }
                }
                if !status.models.is_empty() {
                    div { class: "sidebar-model-list",
                        for model in status.models.iter() {
                            span { class: "sidebar-model-tag", "{model}" }
                        }
                    }
                }
            }
            // -- Trending Topics Section --
            if !trending.is_empty() {
                div { class: "sidebar-section",
                    h4 { class: "sidebar-section-title", "Trending" }
                    for topic in trending.iter() {
                        {
                            let t = topic.clone();
                            rsx! {
                                button {
                                    class: "sidebar-topic-link",
                                    onclick: move |_| on_topic_click.call(t.clone()),
                                    "{topic}"
                                }
                            }
                        }
                    }
                }
            }
            // -- Recent Searches Section --
            if !recent_searches.is_empty() {
                div { class: "sidebar-section",
                    h4 { class: "sidebar-section-title", "Recent Searches" }
                    for search in recent_searches.iter() {
                        {
                            let s = search.clone();
                            rsx! {
                                button {
                                    class: "sidebar-topic-link",
                                    onclick: move |_| on_topic_click.call(s.clone()),
                                    "{search}"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

View File

@@ -1,6 +1,8 @@
mod app_shell;
mod article_detail;
mod card;
mod chat_bubble;
mod dashboard_sidebar;
mod file_row;
mod login;
mod member_row;
@@ -12,8 +14,10 @@ pub mod sub_nav;
mod tool_card;
pub use app_shell::*;
pub use article_detail::*;
pub use card::*;
pub use chat_bubble::*;
pub use dashboard_sidebar::*;
pub use file_row::*;
pub use login::*;
pub use member_row::*;

View File

@@ -1,40 +1,67 @@
use crate::models::{NewsCard as NewsCardModel, NewsCategory};
use crate::models::NewsCard as NewsCardModel;
use dioxus::prelude::*;
/// Renders a news feed card with title, source, category badge, and summary.
///
/// When a thumbnail URL is present but the image fails to load, the card
/// automatically switches to the centered no-thumbnail layout.
///
/// # Arguments
///
/// * `card` - The news card model data to render
/// * `on_click` - Event handler triggered when the card is clicked
/// * `selected` - Whether this card is currently selected (highlighted)
#[component]
pub fn NewsCardView(card: NewsCardModel) -> Element {
let badge_class = format!("news-badge news-badge--{}", card.category.css_class());
pub fn NewsCardView(
card: NewsCardModel,
on_click: EventHandler<NewsCardModel>,
#[props(default = false)] selected: bool,
) -> Element {
// Derive a CSS class from the category string (lowercase, hyphenated)
let css_suffix = card.category.to_lowercase().replace(' ', "-");
let badge_class = format!("news-badge news-badge--{css_suffix}");
// Track whether the thumbnail loaded successfully.
// Starts as true if a URL is provided; set to false on image error.
let has_thumb_url = card.thumbnail_url.is_some();
let mut thumb_ok = use_signal(|| has_thumb_url);
let show_thumb = has_thumb_url && *thumb_ok.read();
let selected_cls = if selected { " news-card--selected" } else { "" };
let thumb_cls = if show_thumb {
""
} else {
" news-card--no-thumb"
};
let card_class = format!("news-card{selected_cls}{thumb_cls}");
// Clone the card for the click handler closure
let card_for_click = card.clone();
rsx! {
article { class: "news-card",
article {
class: "{card_class}",
onclick: move |_| on_click.call(card_for_click.clone()),
if let Some(ref thumb) = card.thumbnail_url {
div { class: "news-card-thumb",
img {
src: "{thumb}",
alt: "{card.title}",
loading: "lazy",
if *thumb_ok.read() {
div { class: "news-card-thumb",
img {
src: "{thumb}",
alt: "",
loading: "lazy",
// Hide the thumbnail container if the image fails to load
onerror: move |_| thumb_ok.set(false),
}
}
}
}
div { class: "news-card-body",
div { class: "news-card-meta",
span { class: "{badge_class}", "{card.category.label()}" }
span { class: "{badge_class}", "{card.category}" }
span { class: "news-card-source", "{card.source}" }
span { class: "news-card-date", "{card.published_at}" }
}
h3 { class: "news-card-title",
a {
href: "{card.url}",
target: "_blank",
rel: "noopener",
"{card.title}"
}
}
h3 { class: "news-card-title", "{card.title}" }
p { class: "news-card-summary", "{card.summary}" }
}
}
@@ -48,7 +75,12 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "Llama 4 Released with 1M Context Window".into(),
source: "Meta AI Blog".into(),
summary: "Meta releases Llama 4 with a 1 million token context window.".into(),
category: NewsCategory::Llm,
content: "Meta has officially released Llama 4, their latest \
open-weight large language model featuring a groundbreaking \
1 million token context window. This represents a major \
leap in context length capabilities."
.into(),
category: "AI".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-18".into(),
@@ -57,7 +89,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "EU AI Act Enforcement Begins".into(),
source: "TechCrunch".into(),
summary: "The EU AI Act enters its enforcement phase across member states.".into(),
category: NewsCategory::Privacy,
content: "The EU AI Act has officially entered its enforcement \
phase. Member states are now required to comply with the \
comprehensive regulatory framework governing AI systems."
.into(),
category: "Privacy".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-17".into(),
@@ -66,7 +102,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "LangChain v0.4 Introduces Native MCP Support".into(),
source: "LangChain Blog".into(),
summary: "New version adds first-class MCP server integration.".into(),
category: NewsCategory::Agents,
content: "LangChain v0.4 introduces native Model Context Protocol \
support, enabling seamless integration with MCP servers for \
tool use and context management in agent workflows."
.into(),
category: "Technology".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-16".into(),
@@ -75,7 +115,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "Ollama Adds Multi-GPU Scheduling".into(),
source: "Ollama".into(),
summary: "Run large models across multiple GPUs with automatic sharding.".into(),
category: NewsCategory::Infrastructure,
content: "Ollama now supports multi-GPU scheduling with automatic \
model sharding. Users can run models across multiple GPUs \
for improved inference performance."
.into(),
category: "Infrastructure".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-15".into(),
@@ -84,7 +128,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "Mistral Open Sources Codestral 2".into(),
source: "Mistral AI".into(),
summary: "Codestral 2 achieves state-of-the-art on HumanEval benchmarks.".into(),
category: NewsCategory::OpenSource,
content: "Mistral AI has open-sourced Codestral 2, a code \
generation model that achieves state-of-the-art results \
on HumanEval and other coding benchmarks."
.into(),
category: "Open Source".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-14".into(),
@@ -93,7 +141,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "NVIDIA Releases NeMo 3.0 Framework".into(),
source: "NVIDIA Developer".into(),
summary: "Updated framework simplifies enterprise LLM fine-tuning.".into(),
category: NewsCategory::Infrastructure,
content: "NVIDIA has released NeMo 3.0, an updated framework \
that simplifies enterprise LLM fine-tuning with improved \
distributed training capabilities."
.into(),
category: "Infrastructure".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-13".into(),
@@ -102,7 +154,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "Anthropic Claude 4 Sets New Reasoning Records".into(),
source: "Anthropic".into(),
summary: "Claude 4 achieves top scores across major reasoning benchmarks.".into(),
category: NewsCategory::Llm,
content: "Anthropic's Claude 4 has set new records across major \
reasoning benchmarks, demonstrating significant improvements \
in mathematical and logical reasoning capabilities."
.into(),
category: "AI".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-12".into(),
@@ -111,7 +167,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "CrewAI Raises $52M for Agent Orchestration".into(),
source: "VentureBeat".into(),
summary: "Series B funding to expand multi-agent orchestration platform.".into(),
category: NewsCategory::Agents,
content: "CrewAI has raised $52M in Series B funding to expand \
its multi-agent orchestration platform, enabling teams \
to build and deploy complex AI agent workflows."
.into(),
category: "Technology".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-11".into(),
@@ -120,7 +180,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "DeepSeek V4 Released Under Apache 2.0".into(),
source: "DeepSeek".into(),
summary: "Latest open-weight model competes with proprietary offerings.".into(),
category: NewsCategory::OpenSource,
content: "DeepSeek has released V4 under the Apache 2.0 license, \
an open-weight model that competes with proprietary \
offerings in both performance and efficiency."
.into(),
category: "Open Source".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-10".into(),
@@ -129,7 +193,11 @@ pub fn mock_news() -> Vec<NewsCardModel> {
title: "GDPR Fines for AI Training Data Reach Record High".into(),
source: "Reuters".into(),
summary: "European regulators issue largest penalties yet for AI data misuse.".into(),
category: NewsCategory::Privacy,
content: "European regulators have issued record-high GDPR fines \
for AI training data misuse, signaling stricter enforcement \
of data protection laws in the AI sector."
.into(),
category: "Privacy".into(),
url: "#".into(),
thumbnail_url: None,
published_at: "2026-02-09".into(),

324
src/infrastructure/llm.rs Normal file
View File

@@ -0,0 +1,324 @@
use dioxus::prelude::*;
#[cfg(feature = "server")]
mod inner {
use serde::{Deserialize, Serialize};
/// A single message in the OpenAI-compatible chat format used by Ollama.
#[derive(Serialize)]
pub(super) struct ChatMessage {
    /// One of "system", "user", or "assistant".
    pub role: String,
    /// The message body text.
    pub content: String,
}
/// Request body for Ollama's OpenAI-compatible chat completions endpoint.
#[derive(Serialize)]
pub(super) struct OllamaChatRequest {
    /// Model ID to run (e.g. "llama3.1:8b").
    pub model: String,
    /// Full conversation history sent to the model.
    pub messages: Vec<ChatMessage>,
    /// Disable streaming so we get a single JSON response.
    pub stream: bool,
}
/// A single choice in the Ollama chat completions response.
#[derive(Deserialize)]
pub(super) struct ChatChoice {
    /// The assistant message for this choice.
    pub message: ChatResponseMessage,
}
/// The assistant message returned inside a choice.
#[derive(Deserialize)]
pub(super) struct ChatResponseMessage {
    /// The generated text content.
    pub content: String,
}
/// Top-level response from Ollama's `/v1/chat/completions` endpoint.
#[derive(Deserialize)]
pub(super) struct OllamaChatResponse {
    /// Candidate completions; callers use the first entry.
    pub choices: Vec<ChatChoice>,
}
/// Fetch the full text content of a webpage by downloading its HTML
/// and extracting the main article body, skipping navigation, headers,
/// footers, and sidebars.
///
/// Uses a tiered extraction strategy:
/// 1. Try content within `<article>`, `<main>`, or `[role="main"]`
/// 2. Fall back to all `<p>` tags outside excluded containers
///
/// All network/parse failures are deliberately swallowed into `None`
/// (best-effort: callers fall back to the search snippet).
///
/// # Arguments
///
/// * `url` - The article URL to fetch
///
/// # Returns
///
/// The extracted text, or `None` if the fetch/parse fails.
/// Text is capped at 8000 characters to stay within LLM context limits.
pub(super) async fn fetch_article_text(url: &str) -> Option<String> {
    // Short timeout: this runs per-article in the summarize path, and the
    // caller has a snippet fallback if the page is slow or unreachable.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .ok()?;
    let resp = client
        .get(url)
        .header("User-Agent", "CERTifAI/1.0 (Article Summarizer)")
        .send()
        .await
        .ok()?;
    if !resp.status().is_success() {
        return None;
    }
    let html = resp.text().await.ok()?;
    let document = scraper::Html::parse_document(&html);
    // Strategy 1: Extract from semantic article containers.
    // Most news sites wrap the main content in <article>, <main>,
    // or an element with role="main".
    let article_selector = scraper::Selector::parse("article, main, [role='main']").ok()?;
    let paragraph_sel = scraper::Selector::parse("p, h1, h2, h3, li").ok()?;
    let mut text_parts: Vec<String> = Vec::with_capacity(64);
    for container in document.select(&article_selector) {
        for element in container.select(&paragraph_sel) {
            collect_text_fragment(element, &mut text_parts);
        }
    }
    // Strategy 2: If article containers yielded little text (under 200
    // bytes total), fall back to all <p> tags that are NOT inside
    // nav/header/footer/aside.
    if joined_len(&text_parts) < 200 {
        text_parts.clear();
        let all_p = scraper::Selector::parse("p").ok()?;
        // Tags whose descendants should be excluded from extraction
        const EXCLUDED_TAGS: &[&str] = &["nav", "header", "footer", "aside", "script", "style"];
        for element in document.select(&all_p) {
            // Walk ancestors and skip if inside an excluded container.
            // Checks tag names directly to avoid ego_tree version issues.
            let inside_excluded = element.ancestors().any(|ancestor| {
                ancestor
                    .value()
                    .as_element()
                    .is_some_and(|el| EXCLUDED_TAGS.contains(&el.name.local.as_ref()))
            });
            if !inside_excluded {
                collect_text_fragment(element, &mut text_parts);
            }
        }
    }
    let full_text = text_parts.join("\n\n");
    // Reject extractions too short (in bytes) to be a real article body.
    if full_text.len() < 100 {
        return None;
    }
    // Cap at 8000 chars to stay within reasonable LLM context.
    // char-based truncation avoids splitting a multi-byte UTF-8 sequence.
    let truncated: String = full_text.chars().take(8000).collect();
    Some(truncated)
}
/// Extract the text of an HTML element and append it to `parts`
/// when it meets a minimum length threshold.
fn collect_text_fragment(element: scraper::ElementRef<'_>, parts: &mut Vec<String>) {
    // Concatenate all descendant text nodes, separated by single spaces.
    let joined: String = element.text().collect::<Vec<_>>().join(" ");
    let fragment = joined.trim();
    // Ignore very short fragments (nav items, buttons, etc.)
    if fragment.len() >= 30 {
        parts.push(fragment.to_string());
    }
}
/// Sum the total length in **bytes** (`str::len`) of all collected text
/// parts.
///
/// Used only as a cheap "did we extract enough content?" heuristic in
/// [`fetch_article_text`], so byte length (rather than character count)
/// is sufficient — the previous doc comment saying "character length"
/// was inaccurate for non-ASCII text.
fn joined_len(parts: &[String]) -> usize {
    parts.iter().map(String::len).sum()
}
}
/// Summarize an article using a local Ollama instance.
///
/// First attempts to fetch the full article text from the provided URL.
/// If that fails (paywall, timeout, etc.), falls back to the search snippet.
/// This mirrors how Perplexity fetches and reads source pages before answering.
///
/// # Arguments
///
/// * `snippet` - The search result snippet (fallback content)
/// * `article_url` - The original article URL to fetch full text from
/// * `ollama_url` - Base URL of the Ollama instance (e.g. "http://localhost:11434")
/// * `model` - The Ollama model ID to use (e.g. "llama3.1:8b")
///
/// # Returns
///
/// A summary string generated by the LLM, or a `ServerFnError` on failure
///
/// # Errors
///
/// Returns `ServerFnError` if the HTTP client cannot be built, the Ollama
/// request fails or times out, or response parsing fails
#[server(endpoint = "/api/summarize")]
pub async fn summarize_article(
    snippet: String,
    article_url: String,
    ollama_url: String,
    model: String,
) -> Result<String, ServerFnError> {
    dotenvy::dotenv().ok();
    use inner::{fetch_article_text, ChatMessage, OllamaChatRequest, OllamaChatResponse};
    // Fall back to env var or default if the URL is empty
    let base_url = if ollama_url.is_empty() {
        std::env::var("OLLAMA_URL").unwrap_or_else(|_| "http://localhost:11434".into())
    } else {
        ollama_url
    };
    // Fall back to env var or default if the model is empty
    let model = if model.is_empty() {
        std::env::var("OLLAMA_MODEL").unwrap_or_else(|_| "llama3.1:8b".into())
    } else {
        model
    };
    // Try to fetch the full article; fall back to the search snippet
    let article_text = fetch_article_text(&article_url).await.unwrap_or(snippet);
    let request_body = OllamaChatRequest {
        model,
        stream: false,
        messages: vec![ChatMessage {
            role: "user".into(),
            content: format!(
                "You are a news summarizer. Summarize the following article text \
                 in 2-3 concise paragraphs. Focus only on the key points and \
                 implications. Do NOT comment on the source, the date, the URL, \
                 the formatting, or whether the content seems complete or not. \
                 Just summarize whatever content is provided.\n\n\
                 {article_text}"
            ),
        }],
    };
    let url = format!("{}/v1/chat/completions", base_url.trim_end_matches('/'));
    // `Client::new()` has no request timeout, so a wedged Ollama instance
    // would hang this server fn forever. Bound the call generously — large
    // local models can take a while — while still failing eventually.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(120))
        .build()
        .map_err(|e| ServerFnError::new(format!("Failed to build HTTP client: {e}")))?;
    let resp = client
        .post(&url)
        .header("content-type", "application/json")
        .json(&request_body)
        .send()
        .await
        .map_err(|e| ServerFnError::new(format!("Ollama request failed: {e}")))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let body = resp.text().await.unwrap_or_default();
        return Err(ServerFnError::new(format!(
            "Ollama returned {status}: {body}"
        )));
    }
    let body: OllamaChatResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(format!("Failed to parse Ollama response: {e}")))?;
    body.choices
        .first()
        .map(|choice| choice.message.content.clone())
        .ok_or_else(|| ServerFnError::new("Empty response from Ollama"))
}
/// A lightweight chat message for the follow-up conversation.
/// Uses simple String role ("system"/"user"/"assistant") for Ollama compatibility.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct FollowUpMessage {
    /// One of "system", "user", or "assistant".
    pub role: String,
    /// The message text for this turn.
    pub content: String,
}
/// Send a follow-up question about an article using a local Ollama instance.
///
/// Accepts the full conversation history (system context + prior turns) and
/// returns the assistant's next response. The system message should contain
/// the article text and summary so the LLM has full context.
///
/// # Arguments
///
/// * `messages` - The conversation history including system context
/// * `ollama_url` - Base URL of the Ollama instance
/// * `model` - The Ollama model ID to use
///
/// # Returns
///
/// The assistant's response text, or a `ServerFnError` on failure
///
/// # Errors
///
/// Returns `ServerFnError` if the HTTP client cannot be built, the Ollama
/// request fails or times out, or response parsing fails
#[server(endpoint = "/api/chat")]
pub async fn chat_followup(
    messages: Vec<FollowUpMessage>,
    ollama_url: String,
    model: String,
) -> Result<String, ServerFnError> {
    dotenvy::dotenv().ok();
    use inner::{ChatMessage, OllamaChatRequest, OllamaChatResponse};
    // Fall back to env var or default if the URL is empty
    let base_url = if ollama_url.is_empty() {
        std::env::var("OLLAMA_URL").unwrap_or_else(|_| "http://localhost:11434".into())
    } else {
        ollama_url
    };
    // Fall back to env var or default if the model is empty
    let model = if model.is_empty() {
        std::env::var("OLLAMA_MODEL").unwrap_or_else(|_| "llama3.1:8b".into())
    } else {
        model
    };
    // Convert FollowUpMessage to inner ChatMessage for the request
    let chat_messages: Vec<ChatMessage> = messages
        .into_iter()
        .map(|m| ChatMessage {
            role: m.role,
            content: m.content,
        })
        .collect();
    let request_body = OllamaChatRequest {
        model,
        stream: false,
        messages: chat_messages,
    };
    let url = format!("{}/v1/chat/completions", base_url.trim_end_matches('/'));
    // Same rationale as summarize_article: a client without a request
    // timeout would hang this server fn forever if Ollama wedges.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(120))
        .build()
        .map_err(|e| ServerFnError::new(format!("Failed to build HTTP client: {e}")))?;
    let resp = client
        .post(&url)
        .header("content-type", "application/json")
        .json(&request_body)
        .send()
        .await
        .map_err(|e| ServerFnError::new(format!("Ollama request failed: {e}")))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let body = resp.text().await.unwrap_or_default();
        return Err(ServerFnError::new(format!(
            "Ollama returned {status}: {body}"
        )));
    }
    let body: OllamaChatResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(format!("Failed to parse Ollama response: {e}")))?;
    body.choices
        .first()
        .map(|choice| choice.message.content.clone())
        .ok_or_else(|| ServerFnError::new("Empty response from Ollama"))
}

View File

@@ -1,10 +1,24 @@
#![cfg(feature = "server")]
// Server function modules (compiled for both web and server features;
// the #[server] macro generates client stubs for the web target)
pub mod llm;
pub mod ollama;
pub mod searxng;
// Server-only modules (Axum handlers, state, etc.)
#[cfg(feature = "server")]
mod auth;
#[cfg(feature = "server")]
mod error;
#[cfg(feature = "server")]
mod server;
#[cfg(feature = "server")]
mod state;
#[cfg(feature = "server")]
pub use auth::*;
#[cfg(feature = "server")]
pub use error::*;
#[cfg(feature = "server")]
pub use server::*;
#[cfg(feature = "server")]
pub use state::*;

View File

@@ -0,0 +1,91 @@
use dioxus::prelude::*;
use serde::{Deserialize, Serialize};
/// Status of a local Ollama instance, including connectivity and loaded models.
///
/// Returned by [`get_ollama_status`]; serializable so the `#[server]` macro
/// can ship it from the server to the web client.
///
/// # Fields
///
/// * `online` - Whether the Ollama API responded successfully
/// * `models` - List of model names currently available on the instance
///   (empty when offline, or when the tags response could not be parsed)
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OllamaStatus {
    pub online: bool,
    pub models: Vec<String>,
}
/// Response from Ollama's `GET /api/tags` endpoint.
///
/// Only the `models` list is deserialized; any other fields in the
/// JSON payload are ignored by serde.
#[cfg(feature = "server")]
#[derive(Deserialize)]
struct OllamaTagsResponse {
    models: Vec<OllamaModel>,
}
/// A single model entry from Ollama's tags API.
///
/// Only the model `name` is kept; other per-model metadata fields
/// in the response are ignored by serde.
#[cfg(feature = "server")]
#[derive(Deserialize)]
struct OllamaModel {
    name: String,
}
/// Probe a local Ollama instance via `GET <ollama_url>/api/tags` to find
/// out whether it is reachable and which models it currently serves.
///
/// # Arguments
///
/// * `ollama_url` - Base URL of the Ollama instance (e.g. "http://localhost:11434");
///   an empty string falls back to the `OLLAMA_URL` environment variable
///
/// # Returns
///
/// An `OllamaStatus` with `online: true` plus the model names when the
/// instance answers, or `online: false` with an empty model list when it
/// does not
///
/// # Errors
///
/// Returns `ServerFnError` only if the HTTP client cannot be built;
/// network failures are reported through `online: false` instead
#[server(endpoint = "/api/ollama-status")]
pub async fn get_ollama_status(ollama_url: String) -> Result<OllamaStatus, ServerFnError> {
    dotenvy::dotenv().ok();
    // An empty URL from the caller means "use the .env configuration".
    let base_url = Some(ollama_url).filter(|u| !u.is_empty()).unwrap_or_else(|| {
        std::env::var("OLLAMA_URL").unwrap_or_else(|_| "http://localhost:11434".into())
    });
    let tags_url = format!("{}/api/tags", base_url.trim_end_matches('/'));

    // Short timeout: this is a liveness probe, not a long-running request.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(5))
        .build()
        .map_err(|e| ServerFnError::new(format!("HTTP client error: {e}")))?;

    // Any transport error or non-2xx status counts as "offline".
    let response = match client.get(&tags_url).send().await {
        Ok(r) if r.status().is_success() => r,
        _ => {
            return Ok(OllamaStatus {
                online: false,
                models: Vec::new(),
            });
        }
    };

    // A malformed body still proves the server is up: report it online
    // with an empty model list rather than failing the whole call.
    match response.json::<OllamaTagsResponse>().await {
        Ok(tags) => Ok(OllamaStatus {
            online: true,
            models: tags.models.into_iter().map(|m| m.name).collect(),
        }),
        Err(_) => Ok(OllamaStatus {
            online: true,
            models: Vec::new(),
        }),
    }
}

View File

@@ -0,0 +1,285 @@
use crate::models::NewsCard;
use dioxus::prelude::*;
// Server-side helpers and types are only needed for the server build.
// The #[server] macro generates a client stub for the web build that
// sends a network request instead of executing this function body.
#[cfg(feature = "server")]
mod inner {
    use serde::Deserialize;
    use std::collections::HashSet;

    /// One hit returned by the SearXNG search API.
    #[derive(Debug, Deserialize)]
    pub(super) struct SearxngResult {
        pub title: String,
        pub url: String,
        pub content: Option<String>,
        #[serde(rename = "publishedDate")]
        pub published_date: Option<String>,
        pub thumbnail: Option<String>,
        /// Relevance score assigned by SearXNG (higher = more relevant).
        #[serde(default)]
        pub score: f64,
    }

    /// Envelope around the result list in a SearXNG JSON response.
    #[derive(Debug, Deserialize)]
    pub(super) struct SearxngResponse {
        pub results: Vec<SearxngResult>,
    }

    /// Derive a display label for a result's source from its URL.
    ///
    /// The label is the URL host with any leading "www." removed;
    /// unparseable URLs fall back to the generic label "Web".
    pub(super) fn extract_source(url_str: &str) -> String {
        let host = url::Url::parse(url_str)
            .ok()
            .and_then(|parsed| parsed.host_str().map(str::to_owned));
        match host {
            Some(h) => match h.strip_prefix("www.") {
                Some(stripped) => stripped.to_string(),
                None => h,
            },
            None => "Web".into(),
        }
    }

    /// Deduplicate and rank search results for quality, Perplexity-style.
    ///
    /// Pipeline: drop snippet-less results, order by relevance score,
    /// keep the single best result per domain, cap at `max_results`.
    pub(super) fn rank_and_deduplicate(
        mut results: Vec<SearxngResult>,
        max_results: usize,
    ) -> Vec<SearxngResult> {
        // A result without a usable snippet (< 20 meaningful chars) is low value.
        results.retain(|r| matches!(&r.content, Some(c) if c.trim().len() >= 20));
        // Highest score first, so the per-domain keeper below is the best one.
        results.sort_by(|lhs, rhs| {
            rhs.score
                .partial_cmp(&lhs.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        // HashSet::insert returns false on repeats, so retain keeps only the
        // first (highest-scored) result seen for each domain.
        let mut seen = HashSet::new();
        results.retain(|r| seen.insert(extract_source(&r.url)));
        results.truncate(max_results);
        results
    }
}
/// Search for news using the SearXNG meta-search engine.
///
/// Uses Perplexity-style query enrichment and result ranking:
/// - Queries the "news" and "general" categories for fresh, relevant results
/// - Filters to the last month for recency
/// - Deduplicates by domain for source diversity
/// - Ranks by SearXNG relevance score
/// - Filters out results without meaningful content
///
/// # Arguments
///
/// * `query` - The search query string
///
/// # Returns
///
/// Up to 15 high-quality `NewsCard` results, or a `ServerFnError` on failure
///
/// # Errors
///
/// Returns `ServerFnError` if the SearXNG request fails or response parsing fails
#[server(endpoint = "/api/search")]
pub async fn search_topic(query: String) -> Result<Vec<NewsCard>, ServerFnError> {
    dotenvy::dotenv().ok();
    use inner::{extract_source, rank_and_deduplicate, SearxngResponse};
    let searxng_url =
        std::env::var("SEARXNG_URL").unwrap_or_else(|_| "http://localhost:8888".into());
    // Enrich the query with "latest news" context for better results,
    // similar to how Perplexity reformulates queries before searching.
    let enriched_query = format!("{query} latest news");
    // Build URL with query parameters using the url crate's encoder
    // to avoid reqwest version conflicts between our dep and dioxus's.
    // Key SearXNG params:
    //   categories=news,general - prioritize news sources + supplement with general
    //   time_range=month - only recent results (last 30 days)
    //   language=en - English results
    //   format=json - machine-readable output
    let encoded_query: String =
        url::form_urlencoded::byte_serialize(enriched_query.as_bytes()).collect();
    let search_url = format!(
        "{searxng_url}/search?q={encoded_query}&format=json&language=en\
        &categories=news,general&time_range=month"
    );
    let client = reqwest::Client::new();
    let resp = client
        .get(&search_url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(format!("SearXNG request failed: {e}")))?;
    if !resp.status().is_success() {
        return Err(ServerFnError::new(format!(
            "SearXNG returned status {}",
            resp.status()
        )));
    }
    let body: SearxngResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(format!("Failed to parse SearXNG response: {e}")))?;
    // Apply Perplexity-style ranking: filter empties, deduplicate domains, sort by score
    let ranked = rank_and_deduplicate(body.results, 15);
    let cards: Vec<NewsCard> = ranked
        .into_iter()
        .map(|r| {
            // Take ownership of the snippet once and derive the summary from
            // it; the previous version cloned the Option<String> only to
            // consume the original right after (a redundant allocation).
            let content = r.content.unwrap_or_default();
            // Char-based truncation keeps the 200-char cut UTF-8-safe.
            let summary: String = content.chars().take(200).collect();
            NewsCard {
                title: r.title,
                source: extract_source(&r.url),
                summary,
                content,
                category: query.clone(),
                url: r.url,
                thumbnail_url: r.thumbnail,
                published_at: r.published_date.unwrap_or_else(|| "Recent".into()),
            }
        })
        .collect();
    Ok(cards)
}
/// Fetch trending topic keywords by running a broad news search and
/// extracting the most frequent meaningful terms from result titles.
///
/// This approach works regardless of whether SearXNG has autocomplete
/// configured, since it uses the standard search API.
///
/// # Returns
///
/// Up to 8 trending keyword strings (capitalized, ranked by frequency),
/// or a `ServerFnError` on failure
///
/// # Errors
///
/// Returns `ServerFnError` if the SearXNG search request fails
#[server(endpoint = "/api/trending")]
pub async fn get_trending_topics() -> Result<Vec<String>, ServerFnError> {
    dotenvy::dotenv().ok();
    use inner::SearxngResponse;
    use std::collections::HashMap;
    let searxng_url =
        std::env::var("SEARXNG_URL").unwrap_or_else(|_| "http://localhost:8888".into());
    let encoded_query: String =
        url::form_urlencoded::byte_serialize(b"trending technology AI").collect();
    let search_url = format!(
        "{searxng_url}/search?q={encoded_query}&format=json&language=en\
        &categories=news&time_range=week"
    );
    // Short timeout: trending is decorative sidebar content, not critical path.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(5))
        .build()
        .map_err(|e| ServerFnError::new(format!("HTTP client error: {e}")))?;
    let resp = client
        .get(&search_url)
        .send()
        .await
        .map_err(|e| ServerFnError::new(format!("SearXNG trending search failed: {e}")))?;
    if !resp.status().is_success() {
        return Err(ServerFnError::new(format!(
            "SearXNG trending search returned status {}",
            resp.status()
        )));
    }
    let body: SearxngResponse = resp
        .json()
        .await
        .map_err(|e| ServerFnError::new(format!("Failed to parse trending response: {e}")))?;
    // Common stop words to exclude from trending keywords
    const STOP_WORDS: &[&str] = &[
        "the", "a", "an", "and", "or", "but", "in", "on", "at", "to", "for", "of", "with", "by",
        "from", "is", "are", "was", "were", "be", "been", "has", "have", "had", "do", "does",
        "did", "will", "would", "could", "should", "may", "can", "not", "no", "it", "its", "this",
        "that", "these", "how", "what", "why", "who", "when", "new", "says", "said", "about",
        "after", "over", "into", "up", "out", "as", "all", "more", "than", "just", "now", "also",
        "us", "we", "you", "your", "our", "if", "so", "like", "get", "make", "year", "years",
        "one", "two",
    ];
    // Count word frequency across all result titles. Words are lowercased
    // and must be at least 3 characters to filter out noise.
    let mut word_counts: HashMap<String, u32> = HashMap::new();
    for result in &body.results {
        for word in result.title.split_whitespace() {
            // Strip punctuation from edges, lowercase
            let clean: String = word
                .trim_matches(|c: char| !c.is_alphanumeric())
                .to_lowercase();
            if clean.len() >= 3 && !STOP_WORDS.contains(&clean.as_str()) {
                *word_counts.entry(clean).or_insert(0) += 1;
            }
        }
    }
    // Sort by frequency descending, breaking ties alphabetically so the
    // output is deterministic: HashMap iteration order is randomized per
    // process, which previously made equal-count keywords shuffle between
    // identical responses.
    let mut sorted: Vec<(String, u32)> = word_counts.into_iter().collect();
    sorted.sort_unstable_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
    // Keep words seen at least twice, capitalize for display, take top 8
    let topics: Vec<String> = sorted
        .into_iter()
        .filter(|(_, count)| *count >= 2)
        .take(8)
        .map(|(word, _)| {
            let mut chars = word.chars();
            match chars.next() {
                Some(c) => c.to_uppercase().to_string() + chars.as_str(),
                None => word,
            }
        })
        .collect();
    Ok(topics)
}

View File

@@ -1,44 +1,5 @@
use serde::{Deserialize, Serialize};
/// Categories for classifying AI news articles.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum NewsCategory {
    /// Large language model announcements and updates
    Llm,
    /// AI agent frameworks and tooling
    Agents,
    /// Data privacy and regulatory compliance
    Privacy,
    /// AI infrastructure and deployment
    Infrastructure,
    /// Open-source AI project releases
    OpenSource,
}
impl NewsCategory {
    /// Returns the display label for a news category.
    pub fn label(&self) -> &'static str {
        self.names().0
    }
    /// Returns the CSS class suffix for styling category badges.
    pub fn css_class(&self) -> &'static str {
        self.names().1
    }
    /// (display label, CSS class suffix) pair for each variant; keeps the
    /// two public lookups in a single exhaustive match.
    fn names(&self) -> (&'static str, &'static str) {
        match self {
            Self::Llm => ("LLM", "llm"),
            Self::Agents => ("Agents", "agents"),
            Self::Privacy => ("Privacy", "privacy"),
            Self::Infrastructure => ("Infrastructure", "infrastructure"),
            Self::OpenSource => ("Open Source", "open-source"),
        }
    }
}
/// A single news feed card representing an AI-related article.
///
/// # Fields
@@ -46,7 +7,8 @@ impl NewsCategory {
/// * `title` - Headline of the article
/// * `source` - Publishing outlet or author
/// * `summary` - Brief summary text
/// * `category` - Classification category
/// * `content` - Full content snippet from search results
/// * `category` - Display label for the search topic (e.g. "AI", "Finance")
/// * `url` - Link to the full article
/// * `thumbnail_url` - Optional thumbnail image URL
/// * `published_at` - ISO 8601 date string
@@ -55,7 +17,8 @@ pub struct NewsCard {
pub title: String,
pub source: String,
pub summary: String,
pub category: NewsCategory,
pub content: String,
pub category: String,
pub url: String,
pub thumbnail_url: Option<String>,
pub published_at: String,

View File

@@ -1,40 +1,131 @@
use dioxus::prelude::*;
use dioxus_sdk::storage::use_persistent;
use crate::components::{NewsCardView, PageHeader};
use crate::models::NewsCategory;
use crate::components::{ArticleDetail, DashboardSidebar, NewsCardView, PageHeader};
use crate::infrastructure::llm::FollowUpMessage;
use crate::models::NewsCard;
/// Dashboard page displaying an AI news feed grid with category filters.
/// Maximum number of recent searches to retain in localStorage.
const MAX_RECENT_SEARCHES: usize = 10;
/// Default search topics shown on the dashboard, inspired by Perplexica.
const DEFAULT_TOPICS: &[&str] = &[
"AI",
"Technology",
"Science",
"Finance",
"Writing",
"Research",
];
/// Dashboard page displaying AI news from SearXNG with topic-based filtering,
/// a split-view article detail panel, and LLM-powered summarization.
///
/// Replaces the previous `OverviewPage`. Shows mock news items
/// that will eventually be sourced from the SearXNG instance.
/// State is persisted across sessions using localStorage:
/// - `certifai_topics`: custom user-defined search topics
/// - `certifai_ollama_url`: Ollama instance URL for summarization
/// - `certifai_ollama_model`: Ollama model ID for summarization
#[component]
pub fn DashboardPage() -> Element {
let news = use_signal(crate::components::news_card::mock_news);
let mut active_filter = use_signal(|| Option::<NewsCategory>::None);
// Persistent state stored in localStorage
let mut custom_topics = use_persistent("certifai_topics".to_string(), Vec::<String>::new);
// Default to empty so the server functions use OLLAMA_URL / OLLAMA_MODEL
// from .env. Only stores a non-empty value when the user explicitly saves
// an override via the Settings panel.
let mut ollama_url = use_persistent("certifai_ollama_url".to_string(), String::new);
let mut ollama_model = use_persistent("certifai_ollama_model".to_string(), String::new);
// Collect filtered news items based on active category filter
let filtered: Vec<_> = {
let items = news.read();
let filter = active_filter.read();
match &*filter {
Some(cat) => items
.iter()
.filter(|n| n.category == *cat)
.cloned()
.collect(),
None => items.clone(),
// Reactive signals for UI state
let mut active_topic = use_signal(|| "AI".to_string());
let mut selected_card = use_signal(|| Option::<NewsCard>::None);
let mut summary = use_signal(|| Option::<String>::None);
let mut is_summarizing = use_signal(|| false);
let mut show_add_input = use_signal(|| false);
let mut new_topic_text = use_signal(String::new);
let mut show_settings = use_signal(|| false);
let mut settings_url = use_signal(String::new);
let mut settings_model = use_signal(String::new);
// Chat follow-up state
let mut chat_messages = use_signal(Vec::<FollowUpMessage>::new);
let mut is_chatting = use_signal(|| false);
// Stores the article text context for the chat system message
let mut article_context = use_signal(String::new);
// Recent search history, persisted in localStorage (capped at MAX_RECENT_SEARCHES)
let mut recent_searches =
use_persistent("certifai_recent_searches".to_string(), Vec::<String>::new);
// Build the complete topic list: defaults + custom
let all_topics: Vec<String> = {
let custom = custom_topics.read();
let mut topics: Vec<String> = DEFAULT_TOPICS.iter().map(|s| (*s).to_string()).collect();
for t in custom.iter() {
if !topics.contains(t) {
topics.push(t.clone());
}
}
topics
};
// All available filter categories
let categories = [
("All", None),
("LLM", Some(NewsCategory::Llm)),
("Agents", Some(NewsCategory::Agents)),
("Privacy", Some(NewsCategory::Privacy)),
("Infrastructure", Some(NewsCategory::Infrastructure)),
("Open Source", Some(NewsCategory::OpenSource)),
];
// Fetch trending topics once on mount (no signal deps = runs once).
// use_resource handles deduplication and won't re-fetch on re-renders.
let trending_resource = use_resource(|| async {
match crate::infrastructure::searxng::get_trending_topics().await {
Ok(topics) => topics,
Err(e) => {
tracing::error!("Failed to fetch trending topics: {e}");
Vec::new()
}
}
});
// Push a topic to the front of recent searches (deduplicating, capped).
// Defined as a closure so it can be called from multiple click handlers.
let mut record_search = move |topic: &str| {
let mut searches = recent_searches.read().clone();
searches.retain(|t| t != topic);
searches.insert(0, topic.to_string());
searches.truncate(MAX_RECENT_SEARCHES);
*recent_searches.write() = searches;
};
// Fetch news reactively when active_topic changes.
// use_resource tracks the signal read inside the closure and only
// re-fetches when active_topic actually changes -- unlike use_effect
// which can re-fire on unrelated re-renders.
let search_resource = use_resource(move || {
let topic = active_topic.read().clone();
async move { crate::infrastructure::searxng::search_topic(topic).await }
});
// Check if an article is selected for split view
let has_selection = selected_card.read().is_some();
let container_class = if has_selection {
"dashboard-split"
} else {
"dashboard-with-sidebar"
};
// Resolve trending from resource (empty while loading / on error)
let trending_topics: Vec<String> = trending_resource
.read()
.as_ref()
.cloned()
.unwrap_or_default();
// Resolve search state from resource
let search_state = search_resource.read();
let is_loading = search_state.is_none();
let search_error: Option<String> = search_state
.as_ref()
.and_then(|r| r.as_ref().err().map(|e| format!("Search failed: {e}")));
let news_cards: Vec<NewsCard> = match search_state.as_ref() {
Some(Ok(c)) => c.clone(),
Some(Err(_)) => crate::components::news_card::mock_news(),
None => Vec::new(),
};
// Drop the borrow before entering rsx! so signals can be written in handlers
drop(search_state);
rsx! {
section { class: "dashboard-page",
@@ -42,24 +133,308 @@ pub fn DashboardPage() -> Element {
title: "Dashboard".to_string(),
subtitle: "AI news and updates".to_string(),
}
// Topic tabs row
div { class: "dashboard-filters",
for (label , cat) in categories {
for topic in &all_topics {
{
let is_active = *active_filter.read() == cat;
let class = if is_active {
let is_active = *active_topic.read() == *topic;
let class_name = if is_active {
"filter-tab filter-tab--active"
} else {
"filter-tab"
};
let is_custom = !DEFAULT_TOPICS.contains(&topic.as_str());
let topic_click = topic.clone();
let topic_remove = topic.clone();
rsx! {
button { class: "{class}", onclick: move |_| active_filter.set(cat.clone()), "{label}" }
div { class: "topic-tab-wrapper",
button {
class: "{class_name}",
onclick: move |_| {
record_search(&topic_click);
active_topic.set(topic_click.clone());
selected_card.set(None);
summary.set(None);
},
"{topic}"
}
if is_custom {
button {
class: "topic-remove",
onclick: move |_| {
let mut topics = custom_topics.read().clone();
topics.retain(|t| *t != topic_remove);
*custom_topics.write() = topics;
// If we removed the active topic, reset
if *active_topic.read() == topic_remove {
active_topic.set("AI".to_string());
}
},
"x"
}
}
}
}
}
}
// Add topic button / inline input
if *show_add_input.read() {
div { class: "topic-input-wrapper",
input {
class: "topic-input",
r#type: "text",
placeholder: "Topic name...",
value: "{new_topic_text}",
oninput: move |e| new_topic_text.set(e.value()),
onkeypress: move |e| {
if e.key() == Key::Enter {
let val = new_topic_text.read().trim().to_string();
if !val.is_empty() {
let mut topics = custom_topics.read().clone();
if !topics.contains(&val) && !DEFAULT_TOPICS.contains(&val.as_str()) {
topics.push(val.clone());
*custom_topics.write() = topics;
record_search(&val);
active_topic.set(val);
}
}
new_topic_text.set(String::new());
show_add_input.set(false);
}
},
}
button {
class: "topic-cancel-btn",
onclick: move |_| {
show_add_input.set(false);
new_topic_text.set(String::new());
},
"Cancel"
}
}
} else {
button {
class: "topic-add-btn",
onclick: move |_| show_add_input.set(true),
"+"
}
}
// Settings toggle
button {
class: "filter-tab settings-toggle",
onclick: move |_| {
let currently_shown = *show_settings.read();
if !currently_shown {
settings_url.set(ollama_url.read().clone());
settings_model.set(ollama_model.read().clone());
}
show_settings.set(!currently_shown);
},
"Settings"
}
}
div { class: "news-grid",
for card in filtered {
NewsCardView { key: "{card.title}", card }
// Settings panel (collapsible)
if *show_settings.read() {
div { class: "settings-panel",
h4 { class: "settings-panel-title", "Ollama Settings" }
p { class: "settings-hint",
"Leave empty to use OLLAMA_URL / OLLAMA_MODEL from .env"
}
div { class: "settings-field",
label { "Ollama URL" }
input {
class: "settings-input",
r#type: "text",
placeholder: "Uses OLLAMA_URL from .env",
value: "{settings_url}",
oninput: move |e| settings_url.set(e.value()),
}
}
div { class: "settings-field",
label { "Model" }
input {
class: "settings-input",
r#type: "text",
placeholder: "Uses OLLAMA_MODEL from .env",
value: "{settings_model}",
oninput: move |e| settings_model.set(e.value()),
}
}
button {
class: "btn btn-primary",
onclick: move |_| {
*ollama_url.write() = settings_url.read().trim().to_string();
*ollama_model.write() = settings_model.read().trim().to_string();
show_settings.set(false);
},
"Save"
}
}
}
// Loading / error state
if is_loading {
div { class: "dashboard-loading", "Searching..." }
}
if let Some(ref err) = search_error {
div { class: "settings-hint", "{err}" }
}
// Main content area: grid + optional detail panel
div { class: "{container_class}",
// Left: news grid
div { class: if has_selection { "dashboard-left" } else { "dashboard-full-grid" },
div { class: if has_selection { "news-grid news-grid--compact" } else { "news-grid" },
for card in news_cards.iter() {
{
let is_selected = selected_card
// Auto-summarize on card selection
.read()
// Store context for follow-up chat
.as_ref()
.is_some_and(|s| s.url == card.url && s.title == card.title);
rsx! {
NewsCardView {
key: "{card.title}-{card.url}",
card: card.clone(),
selected: is_selected,
on_click: move |c: NewsCard| {
let snippet = c.content.clone();
let article_url = c.url.clone();
selected_card.set(Some(c));
summary.set(None);
chat_messages.set(Vec::new());
article_context.set(String::new());
let oll_url = ollama_url.read().clone();
let mdl = ollama_model.read().clone();
spawn(async move {
is_summarizing.set(true);
match crate::infrastructure::llm::summarize_article(
snippet.clone(),
article_url,
oll_url,
mdl,
)
.await
{
Ok(text) => {
article_context
.set(
format!(
"Article content:\n{snippet}\n\n\
AI Summary:\n{text}",
),
);
summary.set(Some(text));
}
Err(e) => {
tracing::error!("Summarization failed: {e}");
summary.set(Some(format!("Summarization failed: {e}")));
}
}
is_summarizing.set(false);
});
},
}
}
}
}
}
}
// Right: article detail panel (when card selected)
if let Some(ref card) = *selected_card.read() {
div { class: "dashboard-right",
ArticleDetail {
card: card.clone(),
on_close: move |_| {
selected_card.set(None);
summary.set(None);
chat_messages.set(Vec::new());
},
summary: summary.read().clone(),
is_summarizing: *is_summarizing.read(),
chat_messages: chat_messages.read().clone(),
is_chatting: *is_chatting.read(),
on_chat_send: move |question: String| {
let oll_url = ollama_url.read().clone();
let mdl = ollama_model.read().clone();
let ctx = article_context.read().clone();
// Append user message to chat
chat_messages
// Build full message history for Ollama
.write()
.push(FollowUpMessage {
role: "user".into(),
content: question,
});
let msgs = {
let history = chat_messages.read();
let mut all = vec![
FollowUpMessage {
role: "system".into(),
content: format!(
"You are a helpful assistant. The user is reading \
a news article. Use the following context to answer \
their questions. Do NOT comment on the source, \
dates, URLs, or formatting.\n\n{ctx}",
),
},
];
all.extend(history.iter().cloned());
all
};
spawn(async move {
is_chatting.set(true);
match crate::infrastructure::llm::chat_followup(msgs, oll_url, mdl).await {
Ok(reply) => {
chat_messages
.write()
.push(FollowUpMessage {
role: "assistant".into(),
content: reply,
});
}
Err(e) => {
tracing::error!("Chat failed: {e}");
chat_messages
.write()
.push(FollowUpMessage {
role: "assistant".into(),
content: format!("Error: {e}"),
});
}
}
is_chatting.set(false);
});
},
}
}
}
// Right: sidebar (when no card selected)
if !has_selection {
DashboardSidebar {
ollama_url: ollama_url.read().clone(),
trending: trending_topics.clone(),
recent_searches: recent_searches.read().clone(),
on_topic_click: move |topic: String| {
record_search(&topic);
active_topic.set(topic);
selected_card.set(None);
summary.set(None);
},
}
}
}
}