feat(chat): add chat interface and connection to Ollama (#10)
All checks were successful
CI / Format (push) Successful in 2s
CI / Clippy (push) Successful in 2m13s
CI / Security Audit (push) Successful in 1m37s
CI / Tests (push) Successful in 2m52s
CI / Deploy (push) Successful in 2s

Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com>
Reviewed-on: #10
This commit was merged in pull request #10.
This commit is contained in:
2026-02-20 19:40:25 +00:00
parent 4acb4558b7
commit 50237f5377
28 changed files with 3148 additions and 196 deletions

View File

@@ -0,0 +1,42 @@
use dioxus::prelude::*;
/// Dropdown bar for selecting the LLM model for the current chat session.
///
/// Shows the active model alongside every model name reported by the
/// Ollama instance, and invokes `on_change` whenever the user picks a
/// different entry. If no models are available, a single disabled
/// placeholder option is rendered instead.
///
/// # Arguments
///
/// * `selected_model` - The currently active model ID
/// * `available_models` - List of model names from Ollama
/// * `on_change` - Callback fired with the new model name
#[component]
pub fn ChatModelSelector(
    selected_model: String,
    available_models: Vec<String>,
    on_change: EventHandler<String>,
) -> Element {
    rsx! {
        div { class: "chat-model-bar",
            label { class: "chat-model-label", "Model:" }
            select {
                class: "chat-model-select",
                value: "{selected_model}",
                // Forward the newly chosen model name to the parent.
                onchange: move |evt: Event<FormData>| on_change.call(evt.value()),
                // Placeholder shown when Ollama reported no models; renders
                // nothing otherwise, so option order is unaffected.
                if available_models.is_empty() {
                    option { disabled: true, "No models available" }
                }
                for name in available_models.iter() {
                    option {
                        value: "{name}",
                        selected: selected_model == *name,
                        "{name}"
                    }
                }
            }
        }
    }
}