//! Shared LLM provider data types.
//!
//! Introduced alongside the seven sidebar sections (Dashboard, Providers,
//! Chat, Tools, Knowledge Base, Developer, Organization) with fully rendered
//! mock views, nested sub-shells for Developer and Organization, and a
//! SearXNG container for future news feed integration; replaces the previous
//! OverviewPage with a news feed dashboard.
use serde::{Deserialize, Serialize};
/// Supported LLM provider backends.
|
|
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
|
pub enum LlmProvider {
|
|
/// Self-hosted models via Ollama
|
|
Ollama,
|
|
/// Hugging Face Inference API
|
|
HuggingFace,
|
|
/// OpenAI-compatible endpoints
|
|
OpenAi,
|
|
/// Anthropic Claude API
|
|
Anthropic,
|
|
}
|
|
|
|
impl LlmProvider {
|
|
/// Returns the display name for a provider.
|
|
pub fn label(&self) -> &'static str {
|
|
match self {
|
|
Self::Ollama => "Ollama",
|
|
Self::HuggingFace => "Hugging Face",
|
|
Self::OpenAi => "OpenAI",
|
|
Self::Anthropic => "Anthropic",
|
|
}
|
|
}
|
|
}
|
|
|
|
/// A chat/completion model available from a provider.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ModelEntry {
    /// Unique model identifier (e.g. "llama3.1:8b").
    pub id: String,
    /// Human-readable display name.
    pub name: String,
    /// Which provider hosts this model.
    pub provider: LlmProvider,
    /// Maximum context length in tokens.
    pub context_window: u32,
}
/// An embedding model available from a provider.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EmbeddingEntry {
    /// Unique embedding model identifier.
    pub id: String,
    /// Human-readable display name.
    pub name: String,
    /// Which provider hosts this model.
    pub provider: LlmProvider,
    /// Output embedding dimensions.
    pub dimensions: u32,
}
/// Active provider configuration state.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// Currently selected provider.
    pub provider: LlmProvider,
    /// ID of the active chat model (matches a `ModelEntry::id`).
    pub selected_model: String,
    /// ID of the active embedding model (matches an `EmbeddingEntry::id`).
    pub selected_embedding: String,
    /// Whether an API key has been configured (the key itself is not
    /// stored here — only this flag is serialized).
    pub api_key_set: bool,
}