test: add more tests (#16)
Some checks failed
CI / Format (push) Successful in 3s
CI / Clippy (push) Successful in 2m47s
CI / Security Audit (push) Successful in 1m35s
CI / Tests (push) Successful in 3m54s
CI / E2E Tests (push) Failing after 16s
CI / Deploy (push) Has been skipped

Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com>
Reviewed-on: #16
This commit was merged in pull request #16.
This commit is contained in:
2026-02-25 10:01:56 +00:00
parent 9085da9fae
commit 1d7aebf37c
29 changed files with 2243 additions and 22 deletions

View File

@@ -440,7 +440,12 @@ pub async fn chat_complete(
let session = doc_to_chat_session(&session_doc);
// Resolve provider URL and model
let (base_url, model) = resolve_provider_url(&state, &session.provider, &session.model);
let (base_url, model) = resolve_provider_url(
&state.services.ollama_url,
&state.services.ollama_model,
&session.provider,
&session.model,
);
// Parse messages from JSON
let chat_msgs: Vec<serde_json::Value> = serde_json::from_str(&messages_json)
@@ -480,10 +485,22 @@ pub async fn chat_complete(
.ok_or_else(|| ServerFnError::new("empty LLM response"))
}
/// Resolve the base URL for a provider, falling back to server defaults.
/// Resolve the base URL for a provider, falling back to Ollama defaults.
///
/// # Arguments
///
/// * `ollama_url` - Default Ollama base URL from config
/// * `ollama_model` - Default Ollama model from config
/// * `provider` - Provider name (e.g. "openai", "anthropic", "huggingface")
/// * `model` - Model ID (may be empty for Ollama default)
///
/// # Returns
///
/// A `(base_url, model)` tuple resolved for the given provider.
#[cfg(feature = "server")]
fn resolve_provider_url(
state: &crate::infrastructure::ServerState,
pub(crate) fn resolve_provider_url(
ollama_url: &str,
ollama_model: &str,
provider: &str,
model: &str,
) -> (String, String) {
@@ -496,12 +513,229 @@ fn resolve_provider_url(
),
// Default to Ollama
_ => (
state.services.ollama_url.clone(),
ollama_url.to_string(),
if model.is_empty() {
state.services.ollama_model.clone()
ollama_model.to_string()
} else {
model.to_string()
},
),
}
}
#[cfg(test)]
mod tests {
    // -----------------------------------------------------------------------
    // BSON document conversion tests (server feature required)
    // -----------------------------------------------------------------------
    #[cfg(feature = "server")]
    mod server_tests {
        use super::super::{doc_to_chat_message, doc_to_chat_session, resolve_provider_url};
        use crate::models::{ChatNamespace, ChatRole};
        use mongodb::bson::{doc, oid::ObjectId, Document};
        use pretty_assertions::assert_eq;

        // -- doc_to_chat_session --

        /// Builds a fully-populated session document plus the `ObjectId`
        /// it was stored under, so tests can compare against the hex id.
        fn sample_session_doc() -> (ObjectId, Document) {
            let oid = ObjectId::new();
            let doc = doc! {
                "_id": oid,
                "user_sub": "user-42",
                "title": "Test Session",
                "namespace": "News",
                "provider": "openai",
                "model": "gpt-4",
                "created_at": "2025-01-01T00:00:00Z",
                "updated_at": "2025-01-02T00:00:00Z",
                "article_url": "https://example.com/article",
            };
            (oid, doc)
        }

        /// The BSON `_id` is surfaced as its lowercase hex string form.
        #[test]
        fn doc_to_chat_session_extracts_id_as_hex() {
            let (oid, doc) = sample_session_doc();
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.id, oid.to_hex());
        }

        /// A "News" namespace string maps to `ChatNamespace::News`.
        #[test]
        fn doc_to_chat_session_maps_news_namespace() {
            let (_, doc) = sample_session_doc();
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.namespace, ChatNamespace::News);
        }

        /// Unrecognized namespace strings fall back to `ChatNamespace::General`.
        #[test]
        fn doc_to_chat_session_defaults_to_general_for_unknown() {
            let mut doc = sample_session_doc().1;
            doc.insert("namespace", "SomethingElse");
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.namespace, ChatNamespace::General);
        }

        /// Every plain string field is copied through verbatim.
        #[test]
        fn doc_to_chat_session_extracts_all_string_fields() {
            let (_, doc) = sample_session_doc();
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.user_sub, "user-42");
            assert_eq!(session.title, "Test Session");
            assert_eq!(session.provider, "openai");
            assert_eq!(session.model, "gpt-4");
            assert_eq!(session.created_at, "2025-01-01T00:00:00Z");
            assert_eq!(session.updated_at, "2025-01-02T00:00:00Z");
        }

        /// A document with no `article_url` key yields `None`.
        #[test]
        fn doc_to_chat_session_handles_missing_article_url() {
            let oid = ObjectId::new();
            let doc = doc! {
                "_id": oid,
                "user_sub": "u",
                "title": "t",
                "provider": "ollama",
                "model": "m",
                "created_at": "c",
                "updated_at": "u",
            };
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.article_url, None);
        }

        /// An empty-string `article_url` is treated the same as absent.
        #[test]
        fn doc_to_chat_session_filters_empty_article_url() {
            let oid = ObjectId::new();
            let doc = doc! {
                "_id": oid,
                "user_sub": "u",
                "title": "t",
                "namespace": "News",
                "provider": "ollama",
                "model": "m",
                "created_at": "c",
                "updated_at": "u",
                "article_url": "",
            };
            let session = doc_to_chat_session(&doc);
            assert_eq!(session.article_url, None);
        }

        // -- doc_to_chat_message --

        /// Builds a message document with an `Assistant` role plus its id.
        fn sample_message_doc() -> (ObjectId, Document) {
            let oid = ObjectId::new();
            let doc = doc! {
                "_id": oid,
                "session_id": "sess-1",
                "role": "Assistant",
                "content": "Hello there!",
                "timestamp": "2025-01-01T12:00:00Z",
            };
            (oid, doc)
        }

        /// The BSON `_id` is surfaced as its lowercase hex string form.
        #[test]
        fn doc_to_chat_message_extracts_id_as_hex() {
            let (oid, doc) = sample_message_doc();
            let msg = doc_to_chat_message(&doc);
            assert_eq!(msg.id, oid.to_hex());
        }

        /// "Assistant" maps to `ChatRole::Assistant`.
        #[test]
        fn doc_to_chat_message_maps_assistant_role() {
            let (_, doc) = sample_message_doc();
            let msg = doc_to_chat_message(&doc);
            assert_eq!(msg.role, ChatRole::Assistant);
        }

        /// "System" maps to `ChatRole::System`.
        #[test]
        fn doc_to_chat_message_maps_system_role() {
            let mut doc = sample_message_doc().1;
            doc.insert("role", "System");
            let msg = doc_to_chat_message(&doc);
            assert_eq!(msg.role, ChatRole::System);
        }

        /// Unrecognized role strings fall back to `ChatRole::User`.
        #[test]
        fn doc_to_chat_message_defaults_to_user_for_unknown() {
            let mut doc = sample_message_doc().1;
            doc.insert("role", "SomethingElse");
            let msg = doc_to_chat_message(&doc);
            assert_eq!(msg.role, ChatRole::User);
        }

        /// Content, timestamp, and session id are copied through verbatim.
        #[test]
        fn doc_to_chat_message_extracts_content_and_timestamp() {
            let (_, doc) = sample_message_doc();
            let msg = doc_to_chat_message(&doc);
            assert_eq!(msg.content, "Hello there!");
            assert_eq!(msg.timestamp, "2025-01-01T12:00:00Z");
            assert_eq!(msg.session_id, "sess-1");
        }

        /// Attachments are never populated from a BSON document.
        #[test]
        fn doc_to_chat_message_attachments_always_empty() {
            let (_, doc) = sample_message_doc();
            let msg = doc_to_chat_message(&doc);
            assert!(msg.attachments.is_empty());
        }

        // -- resolve_provider_url --

        const TEST_OLLAMA_URL: &str = "http://localhost:11434";
        const TEST_OLLAMA_MODEL: &str = "llama3.1:8b";

        /// "openai" resolves to the OpenAI API base URL.
        #[test]
        fn resolve_openai_returns_api_openai() {
            let (url, model) =
                resolve_provider_url(TEST_OLLAMA_URL, TEST_OLLAMA_MODEL, "openai", "gpt-4o");
            assert_eq!(url, "https://api.openai.com");
            assert_eq!(model, "gpt-4o");
        }

        /// "anthropic" resolves to the Anthropic API base URL.
        #[test]
        fn resolve_anthropic_returns_api_anthropic() {
            let (url, model) = resolve_provider_url(
                TEST_OLLAMA_URL,
                TEST_OLLAMA_MODEL,
                "anthropic",
                "claude-3-opus",
            );
            assert_eq!(url, "https://api.anthropic.com");
            assert_eq!(model, "claude-3-opus");
        }

        /// "huggingface" embeds the model id in the inference URL.
        #[test]
        fn resolve_huggingface_returns_model_url() {
            let (url, model) = resolve_provider_url(
                TEST_OLLAMA_URL,
                TEST_OLLAMA_MODEL,
                "huggingface",
                "meta-llama/Llama-2-7b",
            );
            assert_eq!(
                url,
                "https://api-inference.huggingface.co/models/meta-llama/Llama-2-7b"
            );
            assert_eq!(model, "meta-llama/Llama-2-7b");
        }

        /// The explicit "ollama" provider uses the configured Ollama URL.
        #[test]
        fn resolve_ollama_returns_configured_url() {
            let (url, model) =
                resolve_provider_url(TEST_OLLAMA_URL, TEST_OLLAMA_MODEL, "ollama", "mistral:7b");
            assert_eq!(url, TEST_OLLAMA_URL);
            assert_eq!(model, "mistral:7b");
        }

        /// Any provider name not explicitly matched falls back to Ollama.
        /// Uses a genuinely unknown name so the catch-all `_` arm is what is
        /// exercised (previously this test passed "ollama", which only covered
        /// the known provider, not the fallback).
        #[test]
        fn resolve_unknown_defaults_to_ollama() {
            let (url, model) = resolve_provider_url(
                TEST_OLLAMA_URL,
                TEST_OLLAMA_MODEL,
                "not-a-real-provider",
                "mistral:7b",
            );
            assert_eq!(url, TEST_OLLAMA_URL);
            assert_eq!(model, "mistral:7b");
        }

        /// An empty model id resolves to the configured default Ollama model.
        #[test]
        fn resolve_empty_model_falls_back_to_server_default() {
            let (url, model) =
                resolve_provider_url(TEST_OLLAMA_URL, TEST_OLLAMA_MODEL, "ollama", "");
            assert_eq!(url, TEST_OLLAMA_URL);
            assert_eq!(model, TEST_OLLAMA_MODEL);
        }
    }
}