Add a compile-time i18n system with 270 translation keys across 5 locales (EN, DE, FR, ES, PT). Translations are embedded via include_str! and parsed lazily into flat HashMaps with English fallback for missing keys. - Add src/i18n module with Locale enum, t()/tw() lookup functions, and tests - Add JSON translation files for all 5 locales under assets/i18n/ - Provide locale Signal via Dioxus context in App, persisted to localStorage - Replace all hardcoded UI strings across 33 component/page files - Add compact locale picker (globe icon + ISO alpha-2 code) in sidebar header - Add click-outside backdrop dismissal for locale dropdown Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com> Reviewed-on: #12
51 lines
1.6 KiB
Rust
use crate::i18n::{t, Locale};
|
|
use dioxus::prelude::*;
|
|
|
|
/// Dropdown bar for selecting the LLM model for the current chat session.
|
|
///
|
|
/// Displays the currently selected model and a list of available models
|
|
/// from the Ollama instance. Fires `on_change` when the user selects
|
|
/// a different model.
|
|
///
|
|
/// # Arguments
|
|
///
|
|
/// * `selected_model` - The currently active model ID
|
|
/// * `available_models` - List of model names from Ollama
|
|
/// * `on_change` - Callback fired with the new model name
|
|
#[component]
|
|
pub fn ChatModelSelector(
|
|
selected_model: String,
|
|
available_models: Vec<String>,
|
|
on_change: EventHandler<String>,
|
|
) -> Element {
|
|
let locale = use_context::<Signal<Locale>>();
|
|
let l = *locale.read();
|
|
|
|
rsx! {
|
|
div { class: "chat-model-bar",
|
|
label { class: "chat-model-label",
|
|
"{t(l, \"chat.model_label\")}"
|
|
}
|
|
select {
|
|
class: "chat-model-select",
|
|
value: "{selected_model}",
|
|
onchange: move |e: Event<FormData>| {
|
|
on_change.call(e.value());
|
|
},
|
|
for model in &available_models {
|
|
option {
|
|
value: "{model}",
|
|
selected: *model == selected_model,
|
|
"{model}"
|
|
}
|
|
}
|
|
if available_models.is_empty() {
|
|
option { disabled: true,
|
|
"{t(l, \"chat.no_models\")}"
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|