diff --git a/.env.example b/.env.example
index bc49c38..6182d8f 100644
--- a/.env.example
+++ b/.env.example
@@ -39,6 +39,11 @@ SEARXNG_URL=http://localhost:8888
OLLAMA_URL=http://localhost:11434
OLLAMA_MODEL=llama3.1:8b
+# ---------------------------------------------------------------------------
+# LibreChat (external chat via SSO) [OPTIONAL - default: http://localhost:3080]
+# ---------------------------------------------------------------------------
+LIBRECHAT_URL=http://localhost:3080
+
# ---------------------------------------------------------------------------
# LLM Providers (comma-separated list) [OPTIONAL]
# ---------------------------------------------------------------------------
diff --git a/assets/i18n/de.json b/assets/i18n/de.json
index 4ca034c..515c528 100644
--- a/assets/i18n/de.json
+++ b/assets/i18n/de.json
@@ -38,8 +38,6 @@
"dashboard": "Dashboard",
"providers": "Provider",
"chat": "Chat",
- "tools": "Werkzeuge",
- "knowledge_base": "Wissensdatenbank",
"developer": "Entwickler",
"organization": "Organisation",
"switch_light": "Zum hellen Modus wechseln",
@@ -72,28 +70,6 @@
"trending": "Im Trend",
"recent_searches": "Letzte Suchen"
},
- "chat": {
- "new_chat": "Neuer Chat",
- "general": "Allgemein",
- "conversations": "Unterhaltungen",
- "news_chats": "Nachrichten-Chats",
- "all_chats": "Alle Chats",
- "no_conversations": "Noch keine Unterhaltungen",
- "type_message": "Nachricht eingeben...",
- "model_label": "Modell:",
- "no_models": "Keine Modelle verfuegbar",
- "send_to_start": "Senden Sie eine Nachricht, um die Unterhaltung zu starten.",
- "you": "Sie",
- "assistant": "Assistent",
- "thinking": "Denkt nach...",
- "copy_response": "Letzte Antwort kopieren",
- "copy_conversation": "Unterhaltung kopieren",
- "edit_last": "Letzte Nachricht bearbeiten",
- "just_now": "gerade eben",
- "minutes_ago": "vor {n} Min.",
- "hours_ago": "vor {n} Std.",
- "days_ago": "vor {n} T."
- },
"providers": {
"title": "Provider",
"subtitle": "Konfigurieren Sie Ihre LLM- und Embedding-Backends",
@@ -107,37 +83,6 @@
"active_config": "Aktive Konfiguration",
"embedding": "Embedding"
},
- "tools": {
- "title": "Werkzeuge",
- "subtitle": "MCP-Server und Werkzeugintegrationen verwalten",
- "calculator": "Taschenrechner",
- "calculator_desc": "Mathematische Berechnungen und Einheitenumrechnung",
- "tavily": "Tavily-Suche",
- "tavily_desc": "KI-optimierte Websuche-API fuer Echtzeitinformationen",
- "searxng": "SearXNG",
- "searxng_desc": "Datenschutzfreundliche Metasuchmaschine",
- "file_reader": "Dateileser",
- "file_reader_desc": "Lokale Dateien in verschiedenen Formaten lesen und analysieren",
- "code_executor": "Code-Ausfuehrer",
- "code_executor_desc": "Isolierte Codeausfuehrung fuer Python und JavaScript",
- "web_scraper": "Web-Scraper",
- "web_scraper_desc": "Strukturierte Daten aus Webseiten extrahieren",
- "email_sender": "E-Mail-Versand",
- "email_sender_desc": "E-Mails ueber konfigurierten SMTP-Server versenden",
- "git_ops": "Git-Operationen",
- "git_ops_desc": "Mit Git-Repositories fuer Versionskontrolle interagieren"
- },
- "knowledge": {
- "title": "Wissensdatenbank",
- "subtitle": "Dokumente fuer RAG-Abfragen verwalten",
- "search_placeholder": "Dateien suchen...",
- "name": "Name",
- "type": "Typ",
- "size": "Groesse",
- "chunks": "Abschnitte",
- "uploaded": "Hochgeladen",
- "actions": "Aktionen"
- },
"developer": {
"agents_title": "Agent Builder",
"agents_desc": "Erstellen und verwalten Sie KI-Agenten mit LangGraph. Erstellen Sie mehrstufige Schlussfolgerungspipelines, werkzeugnutzende Agenten und autonome Workflows.",
diff --git a/assets/i18n/en.json b/assets/i18n/en.json
index 662b0b7..774f1fa 100644
--- a/assets/i18n/en.json
+++ b/assets/i18n/en.json
@@ -38,8 +38,6 @@
"dashboard": "Dashboard",
"providers": "Providers",
"chat": "Chat",
- "tools": "Tools",
- "knowledge_base": "Knowledge Base",
"developer": "Developer",
"organization": "Organization",
"switch_light": "Switch to light mode",
@@ -72,28 +70,6 @@
"trending": "Trending",
"recent_searches": "Recent Searches"
},
- "chat": {
- "new_chat": "New Chat",
- "general": "General",
- "conversations": "Conversations",
- "news_chats": "News Chats",
- "all_chats": "All Chats",
- "no_conversations": "No conversations yet",
- "type_message": "Type a message...",
- "model_label": "Model:",
- "no_models": "No models available",
- "send_to_start": "Send a message to start the conversation.",
- "you": "You",
- "assistant": "Assistant",
- "thinking": "Thinking...",
- "copy_response": "Copy last response",
- "copy_conversation": "Copy conversation",
- "edit_last": "Edit last message",
- "just_now": "just now",
- "minutes_ago": "{n}m ago",
- "hours_ago": "{n}h ago",
- "days_ago": "{n}d ago"
- },
"providers": {
"title": "Providers",
"subtitle": "Configure your LLM and embedding backends",
@@ -107,37 +83,6 @@
"active_config": "Active Configuration",
"embedding": "Embedding"
},
- "tools": {
- "title": "Tools",
- "subtitle": "Manage MCP servers and tool integrations",
- "calculator": "Calculator",
- "calculator_desc": "Mathematical computation and unit conversion",
- "tavily": "Tavily Search",
- "tavily_desc": "AI-optimized web search API for real-time information",
- "searxng": "SearXNG",
- "searxng_desc": "Privacy-respecting metasearch engine",
- "file_reader": "File Reader",
- "file_reader_desc": "Read and parse local files in various formats",
- "code_executor": "Code Executor",
- "code_executor_desc": "Sandboxed code execution for Python and JavaScript",
- "web_scraper": "Web Scraper",
- "web_scraper_desc": "Extract structured data from web pages",
- "email_sender": "Email Sender",
- "email_sender_desc": "Send emails via configured SMTP server",
- "git_ops": "Git Operations",
- "git_ops_desc": "Interact with Git repositories for version control"
- },
- "knowledge": {
- "title": "Knowledge Base",
- "subtitle": "Manage documents for RAG retrieval",
- "search_placeholder": "Search files...",
- "name": "Name",
- "type": "Type",
- "size": "Size",
- "chunks": "Chunks",
- "uploaded": "Uploaded",
- "actions": "Actions"
- },
"developer": {
"agents_title": "Agent Builder",
"agents_desc": "Build and manage AI agents with LangGraph. Create multi-step reasoning pipelines, tool-using agents, and autonomous workflows.",
diff --git a/assets/i18n/es.json b/assets/i18n/es.json
index eef7960..6a0a4b1 100644
--- a/assets/i18n/es.json
+++ b/assets/i18n/es.json
@@ -38,8 +38,6 @@
"dashboard": "Panel de control",
"providers": "Proveedores",
"chat": "Chat",
- "tools": "Herramientas",
- "knowledge_base": "Base de conocimiento",
"developer": "Desarrollador",
"organization": "Organizacion",
"switch_light": "Cambiar a modo claro",
@@ -72,28 +70,6 @@
"trending": "Tendencias",
"recent_searches": "Busquedas recientes"
},
- "chat": {
- "new_chat": "Nuevo chat",
- "general": "General",
- "conversations": "Conversaciones",
- "news_chats": "Chats de noticias",
- "all_chats": "Todos los chats",
- "no_conversations": "Aun no hay conversaciones",
- "type_message": "Escriba un mensaje...",
- "model_label": "Modelo:",
- "no_models": "No hay modelos disponibles",
- "send_to_start": "Envie un mensaje para iniciar la conversacion.",
- "you": "Usted",
- "assistant": "Asistente",
- "thinking": "Pensando...",
- "copy_response": "Copiar ultima respuesta",
- "copy_conversation": "Copiar conversacion",
- "edit_last": "Editar ultimo mensaje",
- "just_now": "justo ahora",
- "minutes_ago": "hace {n}m",
- "hours_ago": "hace {n}h",
- "days_ago": "hace {n}d"
- },
"providers": {
"title": "Proveedores",
"subtitle": "Configure sus backends de LLM y embeddings",
@@ -107,37 +83,6 @@
"active_config": "Configuracion activa",
"embedding": "Embedding"
},
- "tools": {
- "title": "Herramientas",
- "subtitle": "Gestione servidores MCP e integraciones de herramientas",
- "calculator": "Calculadora",
- "calculator_desc": "Calculo matematico y conversion de unidades",
- "tavily": "Tavily Search",
- "tavily_desc": "API de busqueda web optimizada con IA para informacion en tiempo real",
- "searxng": "SearXNG",
- "searxng_desc": "Motor de metabusqueda que respeta la privacidad",
- "file_reader": "Lector de archivos",
- "file_reader_desc": "Leer y analizar archivos locales en varios formatos",
- "code_executor": "Ejecutor de codigo",
- "code_executor_desc": "Ejecucion de codigo en entorno aislado para Python y JavaScript",
- "web_scraper": "Web Scraper",
- "web_scraper_desc": "Extraer datos estructurados de paginas web",
- "email_sender": "Envio de correo",
- "email_sender_desc": "Enviar correos electronicos a traves del servidor SMTP configurado",
- "git_ops": "Operaciones Git",
- "git_ops_desc": "Interactuar con repositorios Git para control de versiones"
- },
- "knowledge": {
- "title": "Base de conocimiento",
- "subtitle": "Gestione documentos para recuperacion RAG",
- "search_placeholder": "Buscar archivos...",
- "name": "Nombre",
- "type": "Tipo",
- "size": "Tamano",
- "chunks": "Fragmentos",
- "uploaded": "Subido",
- "actions": "Acciones"
- },
"developer": {
"agents_title": "Constructor de agentes",
"agents_desc": "Construya y gestione agentes de IA con LangGraph. Cree pipelines de razonamiento de varios pasos, agentes que utilizan herramientas y flujos de trabajo autonomos.",
diff --git a/assets/i18n/fr.json b/assets/i18n/fr.json
index 113e6f6..9ab76f1 100644
--- a/assets/i18n/fr.json
+++ b/assets/i18n/fr.json
@@ -38,8 +38,6 @@
"dashboard": "Tableau de bord",
"providers": "Fournisseurs",
"chat": "Chat",
- "tools": "Outils",
- "knowledge_base": "Base de connaissances",
"developer": "Developpeur",
"organization": "Organisation",
"switch_light": "Passer en mode clair",
@@ -72,28 +70,6 @@
"trending": "Tendances",
"recent_searches": "Recherches recentes"
},
- "chat": {
- "new_chat": "Nouvelle conversation",
- "general": "General",
- "conversations": "Conversations",
- "news_chats": "Conversations actualites",
- "all_chats": "Toutes les conversations",
- "no_conversations": "Aucune conversation pour le moment",
- "type_message": "Saisissez un message...",
- "model_label": "Modele :",
- "no_models": "Aucun modele disponible",
- "send_to_start": "Envoyez un message pour demarrer la conversation.",
- "you": "Vous",
- "assistant": "Assistant",
- "thinking": "Reflexion en cours...",
- "copy_response": "Copier la derniere reponse",
- "copy_conversation": "Copier la conversation",
- "edit_last": "Modifier le dernier message",
- "just_now": "a l'instant",
- "minutes_ago": "il y a {n} min",
- "hours_ago": "il y a {n} h",
- "days_ago": "il y a {n} j"
- },
"providers": {
"title": "Fournisseurs",
"subtitle": "Configurez vos backends LLM et d'embeddings",
@@ -107,37 +83,6 @@
"active_config": "Configuration active",
"embedding": "Embedding"
},
- "tools": {
- "title": "Outils",
- "subtitle": "Gerez les serveurs MCP et les integrations d'outils",
- "calculator": "Calculatrice",
- "calculator_desc": "Calcul mathematique et conversion d'unites",
- "tavily": "Recherche Tavily",
- "tavily_desc": "API de recherche web optimisee par IA pour des informations en temps reel",
- "searxng": "SearXNG",
- "searxng_desc": "Metamoteur de recherche respectueux de la vie privee",
- "file_reader": "Lecteur de fichiers",
- "file_reader_desc": "Lire et analyser des fichiers locaux dans divers formats",
- "code_executor": "Executeur de code",
- "code_executor_desc": "Execution de code en bac a sable pour Python et JavaScript",
- "web_scraper": "Extracteur web",
- "web_scraper_desc": "Extraire des donnees structurees a partir de pages web",
- "email_sender": "Envoi d'e-mails",
- "email_sender_desc": "Envoyer des e-mails via le serveur SMTP configure",
- "git_ops": "Operations Git",
- "git_ops_desc": "Interagir avec les depots Git pour le controle de version"
- },
- "knowledge": {
- "title": "Base de connaissances",
- "subtitle": "Gerez les documents pour la recuperation RAG",
- "search_placeholder": "Rechercher des fichiers...",
- "name": "Nom",
- "type": "Type",
- "size": "Taille",
- "chunks": "Segments",
- "uploaded": "Importe",
- "actions": "Actions"
- },
"developer": {
"agents_title": "Constructeur d'agents",
"agents_desc": "Construisez et gerez des agents IA avec LangGraph. Creez des pipelines de raisonnement multi-etapes, des agents utilisant des outils et des flux de travail autonomes.",
diff --git a/assets/i18n/pt.json b/assets/i18n/pt.json
index 85ee33e..1d4e7d4 100644
--- a/assets/i18n/pt.json
+++ b/assets/i18n/pt.json
@@ -38,8 +38,6 @@
"dashboard": "Painel",
"providers": "Fornecedores",
"chat": "Chat",
- "tools": "Ferramentas",
- "knowledge_base": "Base de Conhecimento",
"developer": "Programador",
"organization": "Organizacao",
"switch_light": "Mudar para modo claro",
@@ -72,28 +70,6 @@
"trending": "Em destaque",
"recent_searches": "Pesquisas recentes"
},
- "chat": {
- "new_chat": "Nova conversa",
- "general": "Geral",
- "conversations": "Conversas",
- "news_chats": "Conversas de noticias",
- "all_chats": "Todas as conversas",
- "no_conversations": "Ainda sem conversas",
- "type_message": "Escreva uma mensagem...",
- "model_label": "Modelo:",
- "no_models": "Nenhum modelo disponivel",
- "send_to_start": "Envie uma mensagem para iniciar a conversa.",
- "you": "Voce",
- "assistant": "Assistente",
- "thinking": "A pensar...",
- "copy_response": "Copiar ultima resposta",
- "copy_conversation": "Copiar conversa",
- "edit_last": "Editar ultima mensagem",
- "just_now": "agora mesmo",
- "minutes_ago": "ha {n}m",
- "hours_ago": "ha {n}h",
- "days_ago": "ha {n}d"
- },
"providers": {
"title": "Fornecedores",
"subtitle": "Configure os seus backends de LLM e embeddings",
@@ -107,37 +83,6 @@
"active_config": "Configuracao Ativa",
"embedding": "Embedding"
},
- "tools": {
- "title": "Ferramentas",
- "subtitle": "Gerir servidores MCP e integracoes de ferramentas",
- "calculator": "Calculadora",
- "calculator_desc": "Calculo matematico e conversao de unidades",
- "tavily": "Pesquisa Tavily",
- "tavily_desc": "API de pesquisa web otimizada por IA para informacao em tempo real",
- "searxng": "SearXNG",
- "searxng_desc": "Motor de metapesquisa que respeita a privacidade",
- "file_reader": "Leitor de Ficheiros",
- "file_reader_desc": "Ler e analisar ficheiros locais em varios formatos",
- "code_executor": "Executor de Codigo",
- "code_executor_desc": "Execucao de codigo em sandbox para Python e JavaScript",
- "web_scraper": "Web Scraper",
- "web_scraper_desc": "Extrair dados estruturados de paginas web",
- "email_sender": "Envio de Email",
- "email_sender_desc": "Enviar emails atraves do servidor SMTP configurado",
- "git_ops": "Operacoes Git",
- "git_ops_desc": "Interagir com repositorios Git para controlo de versoes"
- },
- "knowledge": {
- "title": "Base de Conhecimento",
- "subtitle": "Gerir documentos para recuperacao RAG",
- "search_placeholder": "Pesquisar ficheiros...",
- "name": "Nome",
- "type": "Tipo",
- "size": "Tamanho",
- "chunks": "Fragmentos",
- "uploaded": "Carregado",
- "actions": "Acoes"
- },
"developer": {
"agents_title": "Construtor de Agentes",
"agents_desc": "Construa e gira agentes de IA com LangGraph. Crie pipelines de raciocinio multi-etapa, agentes com ferramentas e fluxos de trabalho autonomos.",
diff --git a/docker-compose.yml b/docker-compose.yml
index 7194306..1e14865 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -40,4 +40,45 @@ services:
environment:
- SEARXNG_BASE_URL=http://localhost:8888
volumes:
- - ./searxng:/etc/searxng:rw
\ No newline at end of file
+ - ./searxng:/etc/searxng:rw
+
+ librechat:
+ image: ghcr.io/danny-avila/librechat:latest
+ container_name: certifai-librechat
+ restart: unless-stopped
+ ports:
+ - "3080:3080"
+ depends_on:
+ keycloak:
+ condition: service_healthy
+ mongo:
+ condition: service_started
+ environment:
+ # MongoDB (shared instance, separate database)
+ MONGO_URI: mongodb://root:example@mongo:27017/librechat?authSource=admin
+ # Keycloak OIDC SSO
+ OPENID_ISSUER: http://localhost:8080/realms/certifai
+ OPENID_CLIENT_ID: certifai-librechat
+ OPENID_CLIENT_SECRET: certifai-librechat-secret
+ OPENID_CALLBACK_URL: /oauth/openid/callback
+ OPENID_SCOPE: openid profile email
+ OPENID_BUTTON_LABEL: Login with CERTifAI
+ OPENID_AUTH_EXTRA_PARAMS: prompt=none
+ # Disable local auth (SSO only)
+ ALLOW_EMAIL_LOGIN: "false"
+ ALLOW_REGISTRATION: "false"
+ ALLOW_SOCIAL_LOGIN: "true"
+ ALLOW_SOCIAL_REGISTRATION: "true"
+ # App settings
+ APP_TITLE: CERTifAI Chat
+ CUSTOM_FOOTER: CERTifAI - Sovereign GenAI Infrastructure
+ HOST: 0.0.0.0
+ PORT: "3080"
+ NO_INDEX: "true"
+ volumes:
+ - ./librechat/librechat.yaml:/app/librechat.yaml:ro
+ - ./librechat/logo.svg:/app/client/public/assets/logo.svg:ro
+ - librechat-data:/app/data
+
+volumes:
+ librechat-data:
\ No newline at end of file
diff --git a/keycloak/realm-export.json b/keycloak/realm-export.json
index 7e3aa42..eb945ee 100644
--- a/keycloak/realm-export.json
+++ b/keycloak/realm-export.json
@@ -78,6 +78,39 @@
"optionalClientScopes": [
"offline_access"
]
+ },
+ {
+ "clientId": "certifai-librechat",
+ "name": "CERTifAI Chat",
+ "description": "LibreChat OIDC client for CERTifAI",
+ "enabled": true,
+ "publicClient": false,
+ "directAccessGrantsEnabled": false,
+ "standardFlowEnabled": true,
+ "implicitFlowEnabled": false,
+ "serviceAccountsEnabled": false,
+ "protocol": "openid-connect",
+ "secret": "certifai-librechat-secret",
+ "rootUrl": "http://localhost:3080",
+ "baseUrl": "http://localhost:3080",
+ "redirectUris": [
+ "http://localhost:3080/*"
+ ],
+ "webOrigins": [
+ "http://localhost:3080",
+ "http://localhost:8000"
+ ],
+ "attributes": {
+ "post.logout.redirect.uris": "http://localhost:3080"
+ },
+ "defaultClientScopes": [
+ "openid",
+ "profile",
+ "email"
+ ],
+ "optionalClientScopes": [
+ "offline_access"
+ ]
}
],
"clientScopes": [
diff --git a/librechat/librechat.yaml b/librechat/librechat.yaml
new file mode 100644
index 0000000..f79d033
--- /dev/null
+++ b/librechat/librechat.yaml
@@ -0,0 +1,35 @@
+# CERTifAI LibreChat Configuration
+# Ollama backend for self-hosted LLM inference.
+version: 1.2.1
+
+cache: true
+
+registration:
+ socialLogins:
+ - openid
+
+interface:
+ privacyPolicy:
+ externalUrl: http://localhost:8000/privacy
+ termsOfService:
+ externalUrl: http://localhost:8000/impressum
+ endpointsMenu: true
+ modelSelect: true
+ parameters: true
+
+endpoints:
+ ollama:
+ titleModel: "current_model"
+ # Use the Docker host network alias when running inside compose.
+ # Override OLLAMA_URL in .env for external Ollama instances.
+ url: "http://host.docker.internal:11434"
+ models:
+ fetch: true
+ summarize: true
+ forcePrompt: false
+ dropParams:
+ - stop
+ - user
+ - frequency_penalty
+ - presence_penalty
+ modelDisplayLabel: "CERTifAI Ollama"
diff --git a/librechat/logo.svg b/librechat/logo.svg
new file mode 100644
index 0000000..ac16408
--- /dev/null
+++ b/librechat/logo.svg
@@ -0,0 +1,25 @@
+
diff --git a/src/app.rs b/src/app.rs
index be6778c..8bce752 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -22,12 +22,6 @@ pub enum Route {
DashboardPage {},
#[route("/providers")]
ProvidersPage {},
- #[route("/chat")]
- ChatPage {},
- #[route("/tools")]
- ToolsPage {},
- #[route("/knowledge")]
- KnowledgePage {},
#[layout(DeveloperShell)]
#[route("/developer/agents")]
diff --git a/src/components/chat_action_bar.rs b/src/components/chat_action_bar.rs
deleted file mode 100644
index 0dee6be..0000000
--- a/src/components/chat_action_bar.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use crate::i18n::{t, Locale};
-use dioxus::prelude::*;
-use dioxus_free_icons::icons::fa_solid_icons::{FaCopy, FaPenToSquare, FaShareNodes};
-
-/// Action bar displayed above the chat input with copy, share, and edit buttons.
-///
-/// Only visible when there is at least one message in the conversation.
-///
-/// # Arguments
-///
-/// * `on_copy` - Copies the last assistant response to the clipboard
-/// * `on_share` - Copies the full conversation as text to the clipboard
-/// * `on_edit` - Places the last user message back in the input for editing
-/// * `has_messages` - Whether any messages exist (hides the bar when empty)
-/// * `has_assistant_message` - Whether an assistant message exists (disables copy if not)
-/// * `has_user_message` - Whether a user message exists (disables edit if not)
-#[component]
-pub fn ChatActionBar(
- on_copy: EventHandler<()>,
- on_share: EventHandler<()>,
- on_edit: EventHandler<()>,
- has_messages: bool,
- has_assistant_message: bool,
- has_user_message: bool,
-) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- if !has_messages {
- return rsx! {};
- }
-
- rsx! {
- div { class: "chat-action-bar",
- button {
- class: "chat-action-btn",
- disabled: !has_assistant_message,
- title: "{t(l, \"chat.copy_response\")}",
- onclick: move |_| on_copy.call(()),
- dioxus_free_icons::Icon {
- icon: FaCopy,
- width: 14, height: 14,
- }
- span { class: "chat-action-label", "{t(l, \"common.copy\")}" }
- }
- button {
- class: "chat-action-btn",
- title: "{t(l, \"chat.copy_conversation\")}",
- onclick: move |_| on_share.call(()),
- dioxus_free_icons::Icon {
- icon: FaShareNodes,
- width: 14, height: 14,
- }
- span { class: "chat-action-label", "{t(l, \"common.share\")}" }
- }
- button {
- class: "chat-action-btn",
- disabled: !has_user_message,
- title: "{t(l, \"chat.edit_last\")}",
- onclick: move |_| on_edit.call(()),
- dioxus_free_icons::Icon {
- icon: FaPenToSquare,
- width: 14, height: 14,
- }
- span { class: "chat-action-label", "{t(l, \"common.edit\")}" }
- }
- }
- }
-}
diff --git a/src/components/chat_bubble.rs b/src/components/chat_bubble.rs
deleted file mode 100644
index 5007186..0000000
--- a/src/components/chat_bubble.rs
+++ /dev/null
@@ -1,142 +0,0 @@
-use crate::i18n::{t, Locale};
-use crate::models::{ChatMessage, ChatRole};
-use dioxus::prelude::*;
-
-/// Render markdown content to HTML using `pulldown-cmark`.
-///
-/// # Arguments
-///
-/// * `md` - Raw markdown string
-///
-/// # Returns
-///
-/// HTML string suitable for `dangerous_inner_html`
-fn markdown_to_html(md: &str) -> String {
- use pulldown_cmark::{Options, Parser};
-
- let mut opts = Options::empty();
- opts.insert(Options::ENABLE_TABLES);
- opts.insert(Options::ENABLE_STRIKETHROUGH);
- opts.insert(Options::ENABLE_TASKLISTS);
-
- let parser = Parser::new_ext(md, opts);
- let mut html = String::with_capacity(md.len() * 2);
- pulldown_cmark::html::push_html(&mut html, parser);
- html
-}
-
-/// Renders a single chat message bubble with role-based styling.
-///
-/// User messages are displayed as plain text, right-aligned.
-/// Assistant messages are rendered as markdown with `pulldown-cmark`.
-/// System messages are hidden from the UI.
-///
-/// # Arguments
-///
-/// * `message` - The chat message to render
-#[component]
-pub fn ChatBubble(message: ChatMessage) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- // System messages are not rendered in the UI
- if message.role == ChatRole::System {
- return rsx! {};
- }
-
- let bubble_class = match message.role {
- ChatRole::User => "chat-bubble chat-bubble--user",
- ChatRole::Assistant => "chat-bubble chat-bubble--assistant",
- ChatRole::System => unreachable!(),
- };
-
- let role_label = match message.role {
- ChatRole::User => t(l, "chat.you"),
- ChatRole::Assistant => t(l, "chat.assistant"),
- ChatRole::System => unreachable!(),
- };
-
- // Format timestamp for display (show time only if today)
- let display_time = if message.timestamp.len() >= 16 {
- // Extract HH:MM from ISO 8601
- message.timestamp[11..16].to_string()
- } else {
- message.timestamp.clone()
- };
-
- let is_assistant = message.role == ChatRole::Assistant;
-
- rsx! {
- div { class: "{bubble_class}",
- div { class: "chat-bubble-header",
- span { class: "chat-bubble-role", "{role_label}" }
- span { class: "chat-bubble-time", "{display_time}" }
- }
- if is_assistant {
- // Render markdown for assistant messages
- div {
- class: "chat-bubble-content chat-prose",
- dangerous_inner_html: "{markdown_to_html(&message.content)}",
- }
- } else {
- div { class: "chat-bubble-content", "{message.content}" }
- }
- if !message.attachments.is_empty() {
- div { class: "chat-bubble-attachments",
- for att in &message.attachments {
- span { class: "chat-attachment", "{att.name}" }
- }
- }
- }
- }
- }
-}
-
-/// Renders a streaming assistant message bubble.
-///
-/// While waiting for tokens, shows a "Thinking..." indicator with
-/// a pulsing dot animation. Once tokens arrive, renders them as
-/// markdown with a blinking cursor.
-///
-/// # Arguments
-///
-/// * `content` - The accumulated streaming content so far
-#[component]
-pub fn StreamingBubble(content: String) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- if content.is_empty() {
- // Thinking state -- no tokens yet
- rsx! {
- div { class: "chat-bubble chat-bubble--assistant chat-bubble--thinking",
- div { class: "chat-thinking",
- span { class: "chat-thinking-dots",
- span { class: "chat-dot" }
- span { class: "chat-dot" }
- span { class: "chat-dot" }
- }
- span { class: "chat-thinking-text",
- "{t(l, \"chat.thinking\")}"
- }
- }
- }
- }
- } else {
- let html = markdown_to_html(&content);
- rsx! {
- div { class: "chat-bubble chat-bubble--assistant chat-bubble--streaming",
- div { class: "chat-bubble-header",
- span { class: "chat-bubble-role",
- "{t(l, \"chat.assistant\")}"
- }
- }
- div {
- class: "chat-bubble-content chat-prose",
- dangerous_inner_html: "{html}",
- }
- span { class: "chat-streaming-cursor" }
- }
- }
- }
-}
diff --git a/src/components/chat_input_bar.rs b/src/components/chat_input_bar.rs
deleted file mode 100644
index d8dc50e..0000000
--- a/src/components/chat_input_bar.rs
+++ /dev/null
@@ -1,73 +0,0 @@
-use crate::i18n::{t, Locale};
-use dioxus::prelude::*;
-
-/// Chat input bar with a textarea and send button.
-///
-/// Enter sends the message; Shift+Enter inserts a newline.
-/// The input is disabled during streaming.
-///
-/// # Arguments
-///
-/// * `input_text` - Two-way bound input text signal
-/// * `on_send` - Callback fired with the message text when sent
-/// * `is_streaming` - Whether to disable the input (streaming in progress)
-#[component]
-pub fn ChatInputBar(
- input_text: Signal,
- on_send: EventHandler,
- is_streaming: bool,
-) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- let mut input = input_text;
-
- rsx! {
- div { class: "chat-input-bar",
- textarea {
- class: "chat-input",
- placeholder: "{t(l, \"chat.type_message\")}",
- disabled: is_streaming,
- rows: "1",
- value: "{input}",
- oninput: move |e: Event| {
- input.set(e.value());
- },
- onkeypress: move |e: Event| {
- // Enter sends, Shift+Enter adds newline
- if e.key() == Key::Enter && !e.modifiers().shift() {
- e.prevent_default();
- let text = input.read().trim().to_string();
- if !text.is_empty() {
- on_send.call(text);
- input.set(String::new());
- }
- }
- },
- }
- button {
- class: "btn-primary chat-send-btn",
- disabled: is_streaming || input.read().trim().is_empty(),
- onclick: move |_| {
- let text = input.read().trim().to_string();
- if !text.is_empty() {
- on_send.call(text);
- input.set(String::new());
- }
- },
- if is_streaming {
- // Stop icon during streaming
- dioxus_free_icons::Icon {
- icon: dioxus_free_icons::icons::fa_solid_icons::FaStop,
- width: 16, height: 16,
- }
- } else {
- dioxus_free_icons::Icon {
- icon: dioxus_free_icons::icons::fa_solid_icons::FaPaperPlane,
- width: 16, height: 16,
- }
- }
- }
- }
- }
-}
diff --git a/src/components/chat_message_list.rs b/src/components/chat_message_list.rs
deleted file mode 100644
index a9175ce..0000000
--- a/src/components/chat_message_list.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use crate::components::{ChatBubble, StreamingBubble};
-use crate::i18n::{t, Locale};
-use crate::models::ChatMessage;
-use dioxus::prelude::*;
-
-/// Scrollable message list that renders all messages in a chat session.
-///
-/// Auto-scrolls to the bottom when new messages arrive or during streaming.
-/// Shows a streaming bubble with a blinking cursor when `is_streaming` is true.
-///
-/// # Arguments
-///
-/// * `messages` - All loaded messages for the current session
-/// * `streaming_content` - Accumulated content from the SSE stream
-/// * `is_streaming` - Whether a response is currently streaming
-#[component]
-pub fn ChatMessageList(
- messages: Vec,
- streaming_content: String,
- is_streaming: bool,
-) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- rsx! {
- div {
- class: "chat-message-list",
- id: "chat-message-list",
- if messages.is_empty() && !is_streaming {
- div { class: "chat-empty",
- p { "{t(l, \"chat.send_to_start\")}" }
- }
- }
- for msg in &messages {
- ChatBubble { key: "{msg.id}", message: msg.clone() }
- }
- if is_streaming {
- StreamingBubble { content: streaming_content }
- }
- }
- }
-}
diff --git a/src/components/chat_model_selector.rs b/src/components/chat_model_selector.rs
deleted file mode 100644
index c0d3a9e..0000000
--- a/src/components/chat_model_selector.rs
+++ /dev/null
@@ -1,50 +0,0 @@
-use crate::i18n::{t, Locale};
-use dioxus::prelude::*;
-
-/// Dropdown bar for selecting the LLM model for the current chat session.
-///
-/// Displays the currently selected model and a list of available models
-/// from the Ollama instance. Fires `on_change` when the user selects
-/// a different model.
-///
-/// # Arguments
-///
-/// * `selected_model` - The currently active model ID
-/// * `available_models` - List of model names from Ollama
-/// * `on_change` - Callback fired with the new model name
-#[component]
-pub fn ChatModelSelector(
- selected_model: String,
- available_models: Vec,
- on_change: EventHandler,
-) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- rsx! {
- div { class: "chat-model-bar",
- label { class: "chat-model-label",
- "{t(l, \"chat.model_label\")}"
- }
- select {
- class: "chat-model-select",
- value: "{selected_model}",
- onchange: move |e: Event| {
- on_change.call(e.value());
- },
- for model in &available_models {
- option {
- value: "{model}",
- selected: *model == selected_model,
- "{model}"
- }
- }
- if available_models.is_empty() {
- option { disabled: true,
- "{t(l, \"chat.no_models\")}"
- }
- }
- }
- }
- }
-}
diff --git a/src/components/chat_sidebar.rs b/src/components/chat_sidebar.rs
deleted file mode 100644
index cb1c88c..0000000
--- a/src/components/chat_sidebar.rs
+++ /dev/null
@@ -1,258 +0,0 @@
-use crate::i18n::{t, tw, Locale};
-use crate::models::{ChatNamespace, ChatSession};
-use dioxus::prelude::*;
-
-/// Chat sidebar displaying grouped session list with actions.
-///
-/// Sessions are split into "News Chats" and "General" sections.
-/// Each session item shows the title and relative date, with
-/// rename and delete actions on hover.
-///
-/// # Arguments
-///
-/// * `sessions` - All chat sessions for the user
-/// * `active_session_id` - Currently selected session ID (highlighted)
-/// * `on_select` - Callback when a session is clicked
-/// * `on_new` - Callback to create a new chat session
-/// * `on_rename` - Callback with `(session_id, new_title)`
-/// * `on_delete` - Callback with `session_id`
-#[component]
-pub fn ChatSidebar(
- sessions: Vec,
- active_session_id: Option,
- on_select: EventHandler,
- on_new: EventHandler<()>,
- on_rename: EventHandler<(String, String)>,
- on_delete: EventHandler,
-) -> Element {
- let locale = use_context::>();
- let l = *locale.read();
-
- // Split sessions by namespace
- let news_sessions: Vec<&ChatSession> = sessions
- .iter()
- .filter(|s| s.namespace == ChatNamespace::News)
- .collect();
- let general_sessions: Vec<&ChatSession> = sessions
- .iter()
- .filter(|s| s.namespace == ChatNamespace::General)
- .collect();
-
- // Signal for inline rename state: Option<(session_id, current_value)>
- let rename_state: Signal