Files
certifai/src/pages/providers.rs
Sharang Parnerkar d814e22f9d
All checks were successful
CI / Format (push) Successful in 3s
CI / Clippy (push) Successful in 3m4s
CI / Security Audit (push) Successful in 1m39s
CI / Tests (push) Successful in 4m26s
CI / Deploy (push) Successful in 5s
feat(i18n): add internationalization with DE, FR, ES, PT translations (#12)
Add a compile-time i18n system with 270 translation keys across 5 locales
(EN, DE, FR, ES, PT). Translations are embedded via include_str! and parsed
lazily into flat HashMaps with English fallback for missing keys.

- Add src/i18n module with Locale enum, t()/tw() lookup functions, and tests
- Add JSON translation files for all 5 locales under assets/i18n/
- Provide locale Signal via Dioxus context in App, persisted to localStorage
- Replace all hardcoded UI strings across 33 component/page files
- Add compact locale picker (globe icon + ISO alpha-2 code) in sidebar header
- Add click-outside backdrop dismissal for locale dropdown

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com>
Reviewed-on: #12
2026-02-22 16:48:51 +00:00

226 lines
8.9 KiB
Rust

use dioxus::prelude::*;
use crate::components::PageHeader;
use crate::i18n::{t, Locale};
use crate::models::{EmbeddingEntry, LlmProvider, ModelEntry, ProviderConfig};
/// Providers page for configuring LLM and embedding model backends.
///
/// Two-column layout: left side has a configuration form, right side
/// shows the currently active provider status.
///
/// Expects a `Signal<Locale>` in Dioxus context (provided by the app
/// root); every visible string is resolved through `t()`.
#[component]
pub fn ProvidersPage() -> Element {
    // Current locale, copied out of the shared context signal once per render.
    let locale = use_context::<Signal<Locale>>();
    let l = *locale.read();
    // Form state. Editing any field clears `saved`; the Save button sets it.
    let mut selected_provider = use_signal(|| LlmProvider::Ollama);
    let mut selected_model = use_signal(|| "llama3.1:8b".to_string());
    let mut selected_embedding = use_signal(|| "nomic-embed-text".to_string());
    let mut api_key = use_signal(String::new);
    let mut saved = use_signal(|| false);
    // Static catalogs; filtered below against the selected provider.
    let models = mock_models();
    let embeddings = mock_embeddings();
    // Filter models/embeddings by selected provider
    let provider_val = selected_provider.read().clone();
    let available_models: Vec<_> = models
        .iter()
        .filter(|m| m.provider == provider_val)
        .collect();
    let available_embeddings: Vec<_> = embeddings
        .iter()
        .filter(|e| e.provider == provider_val)
        .collect();
    // Snapshot of the current selections, rendered in the status panel.
    // NOTE(review): switching provider does not reset `selected_model` /
    // `selected_embedding`, so the status card can show a model id that
    // belongs to the previously selected provider — confirm this is intended.
    let active_config = ProviderConfig {
        provider: provider_val.clone(),
        selected_model: selected_model.read().clone(),
        selected_embedding: selected_embedding.read().clone(),
        api_key_set: !api_key.read().is_empty(),
    };
    rsx! {
        section { class: "providers-page",
            PageHeader {
                title: t(l, "providers.title"),
                subtitle: t(l, "providers.subtitle"),
            }
            div { class: "providers-layout",
                // Left column: the configuration form.
                div { class: "providers-form",
                    // Provider selector; changing it re-filters both model lists.
                    div { class: "form-group",
                        label { "{t(l, \"providers.provider\")}" }
                        select {
                            class: "form-select",
                            value: "{provider_val.label()}",
                            onchange: move |evt: Event<FormData>| {
                                // Map the <option> display label back to the enum;
                                // anything unrecognized falls back to Ollama.
                                let val = evt.value();
                                let prov = match val.as_str() {
                                    "Hugging Face" => LlmProvider::HuggingFace,
                                    "OpenAI" => LlmProvider::OpenAi,
                                    "Anthropic" => LlmProvider::Anthropic,
                                    _ => LlmProvider::Ollama,
                                };
                                selected_provider.set(prov);
                                saved.set(false);
                            },
                            option { value: "Ollama", "Ollama" }
                            option { value: "Hugging Face", "Hugging Face" }
                            option { value: "OpenAI", "OpenAI" }
                            option { value: "Anthropic", "Anthropic" }
                        }
                    }
                    // LLM model selector, limited to the chosen provider.
                    div { class: "form-group",
                        label { "{t(l, \"providers.model\")}" }
                        select {
                            class: "form-select",
                            value: "{selected_model}",
                            onchange: move |evt: Event<FormData>| {
                                selected_model.set(evt.value());
                                saved.set(false);
                            },
                            for m in &available_models {
                                option { value: "{m.id}", "{m.name} ({m.context_window}k ctx)" }
                            }
                        }
                    }
                    // Embedding model selector, limited to the chosen provider.
                    div { class: "form-group",
                        label { "{t(l, \"providers.embedding_model\")}" }
                        select {
                            class: "form-select",
                            value: "{selected_embedding}",
                            onchange: move |evt: Event<FormData>| {
                                selected_embedding.set(evt.value());
                                saved.set(false);
                            },
                            for e in &available_embeddings {
                                option { value: "{e.id}", "{e.name} ({e.dimensions}d)" }
                            }
                        }
                    }
                    // API key input; only its non-emptiness is surfaced in the
                    // status panel, the value itself is never displayed.
                    div { class: "form-group",
                        label { "{t(l, \"providers.api_key\")}" }
                        input {
                            class: "form-input",
                            r#type: "password",
                            placeholder: "{t(l, \"providers.api_key_placeholder\")}",
                            value: "{api_key}",
                            oninput: move |evt: Event<FormData>| {
                                api_key.set(evt.value());
                                saved.set(false);
                            },
                        }
                    }
                    button {
                        class: "btn-primary",
                        onclick: move |_| saved.set(true),
                        "{t(l, \"providers.save_config\")}"
                    }
                    if *saved.read() {
                        p { class: "form-success", "{t(l, \"providers.config_saved\")}" }
                    }
                }
                // Right column: read-only view of the active configuration.
                div { class: "providers-status",
                    h3 { "{t(l, \"providers.active_config\")}" }
                    div { class: "status-card",
                        div { class: "status-row",
                            span { class: "status-label", "{t(l, \"providers.provider\")}" }
                            span { class: "status-value", "{active_config.provider.label()}" }
                        }
                        div { class: "status-row",
                            span { class: "status-label", "{t(l, \"providers.model\")}" }
                            span { class: "status-value", "{active_config.selected_model}" }
                        }
                        div { class: "status-row",
                            span { class: "status-label", "{t(l, \"providers.embedding\")}" }
                            span { class: "status-value", "{active_config.selected_embedding}" }
                        }
                        div { class: "status-row",
                            span { class: "status-label", "{t(l, \"providers.api_key\")}" }
                            span { class: "status-value",
                                if active_config.api_key_set {
                                    "{t(l, \"common.set\")}"
                                } else {
                                    "{t(l, \"common.not_set\")}"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Returns mock model entries for all providers.
fn mock_models() -> Vec<ModelEntry> {
    // Flat catalog of (id, display name, provider, context window) tuples;
    // the context window is rendered as "{n}k ctx" in the model dropdown.
    let catalog = [
        ("llama3.1:8b", "Llama 3.1 8B", LlmProvider::Ollama, 128),
        ("llama3.1:70b", "Llama 3.1 70B", LlmProvider::Ollama, 128),
        ("mistral:7b", "Mistral 7B", LlmProvider::Ollama, 32),
        ("meta-llama/Llama-3.1-8B", "Llama 3.1 8B", LlmProvider::HuggingFace, 128),
        ("gpt-4o", "GPT-4o", LlmProvider::OpenAi, 128),
        ("claude-sonnet-4-6", "Claude Sonnet 4.6", LlmProvider::Anthropic, 200),
    ];
    catalog
        .into_iter()
        .map(|(id, name, provider, context_window)| ModelEntry {
            id: id.into(),
            name: name.into(),
            provider,
            context_window,
        })
        .collect()
}
/// Returns mock embedding entries for all providers.
fn mock_embeddings() -> Vec<EmbeddingEntry> {
    // Flat catalog of (id, display name, provider, dimensions) tuples;
    // the dimension count is rendered as "{n}d" in the embedding dropdown.
    let catalog = [
        ("nomic-embed-text", "Nomic Embed Text", LlmProvider::Ollama, 768),
        (
            "sentence-transformers/all-MiniLM-L6-v2",
            "MiniLM-L6-v2",
            LlmProvider::HuggingFace,
            384,
        ),
        ("text-embedding-3-small", "Embedding 3 Small", LlmProvider::OpenAi, 1536),
        ("voyage-3", "Voyage 3", LlmProvider::Anthropic, 1024),
    ];
    catalog
        .into_iter()
        .map(|(id, name, provider, dimensions)| EmbeddingEntry {
            id: id.into(),
            name: name.into(),
            provider,
            dimensions,
        })
        .collect()
}