test: add comprehensive unit test suite (~85 new tests)

Add unit tests across all model and server infrastructure layers,
increasing test count from 7 to 92. Covers serde round-trips, enum
methods, defaults, config parsing, error mapping, PKCE crypto (with
RFC 7636 test vector), OAuth store, and SearXNG ranking/dedup logic.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Sharang Parnerkar
2026-02-24 18:43:27 +01:00
parent e244711644
commit 6890ed9b42
12 changed files with 1123 additions and 13 deletions

View File

@@ -5,13 +5,13 @@ use dioxus::prelude::*;
// The #[server] macro generates a client stub for the web build that
// sends a network request instead of executing this function body.
#[cfg(feature = "server")]
mod inner {
pub(crate) mod inner {
use serde::Deserialize;
use std::collections::HashSet;
/// Individual result from the SearXNG search API.
#[derive(Debug, Deserialize)]
pub(super) struct SearxngResult {
pub(crate) struct SearxngResult {
pub title: String,
pub url: String,
pub content: Option<String>,
@@ -25,7 +25,7 @@ mod inner {
/// Top-level response from the SearXNG search API.
#[derive(Debug, Deserialize)]
pub(super) struct SearxngResponse {
pub(crate) struct SearxngResponse {
pub results: Vec<SearxngResult>,
}
@@ -40,7 +40,7 @@ mod inner {
/// # Returns
///
/// The domain host or a fallback "Web" string
pub(super) fn extract_source(url_str: &str) -> String {
pub(crate) fn extract_source(url_str: &str) -> String {
url::Url::parse(url_str)
.ok()
.and_then(|u| u.host_str().map(String::from))
@@ -64,7 +64,7 @@ mod inner {
/// # Returns
///
/// Filtered, deduplicated, and ranked results
pub(super) fn rank_and_deduplicate(
pub(crate) fn rank_and_deduplicate(
mut results: Vec<SearxngResult>,
max_results: usize,
) -> Vec<SearxngResult> {
@@ -285,3 +285,166 @@ pub async fn get_trending_topics() -> Result<Vec<String>, ServerFnError> {
Ok(topics)
}
#[cfg(all(test, feature = "server"))]
mod tests {
    #![allow(clippy::unwrap_used, clippy::expect_used)]
    use super::inner::*;
    use pretty_assertions::assert_eq;

    // -----------------------------------------------------------------------
    // extract_source()
    // -----------------------------------------------------------------------

    #[test]
    fn extract_source_strips_www() {
        assert_eq!(
            extract_source("https://www.example.com/page"),
            "example.com"
        );
    }

    #[test]
    fn extract_source_returns_domain() {
        assert_eq!(
            extract_source("https://techcrunch.com/article"),
            "techcrunch.com"
        );
    }

    #[test]
    fn extract_source_invalid_url_returns_web() {
        assert_eq!(extract_source("not-a-url"), "Web");
    }

    #[test]
    fn extract_source_no_scheme_returns_web() {
        // A bare host has no scheme, so url::Url::parse rejects it and the
        // fallback label is returned instead.
        assert_eq!(extract_source("example.com/path"), "Web");
    }

    // -----------------------------------------------------------------------
    // rank_and_deduplicate()
    // -----------------------------------------------------------------------

    /// Builds a test fixture result; an empty `content` becomes `None`.
    fn fixture(url: &str, content: &str, score: f64) -> SearxngResult {
        let body = (!content.is_empty()).then(|| content.to_owned());
        SearxngResult {
            title: "Title".into(),
            url: url.into(),
            content: body,
            published_date: None,
            thumbnail: None,
            score,
        }
    }

    #[test]
    fn rank_filters_empty_content() {
        let input = vec![
            fixture("https://a.com", "", 10.0),
            fixture(
                "https://b.com",
                "This is meaningful content that passes the length filter",
                5.0,
            ),
        ];
        let kept = rank_and_deduplicate(input, 10);
        assert_eq!(kept.len(), 1);
        assert_eq!(kept[0].url, "https://b.com");
    }

    #[test]
    fn rank_filters_short_content() {
        let input = vec![
            fixture("https://a.com", "short", 10.0),
            fixture(
                "https://b.com",
                "This content is long enough to pass the 20-char filter threshold",
                5.0,
            ),
        ];
        assert_eq!(rank_and_deduplicate(input, 10).len(), 1);
    }

    #[test]
    fn rank_deduplicates_by_domain_keeps_highest() {
        let input = vec![
            fixture(
                "https://example.com/page1",
                "First result with enough content here for the filter",
                3.0,
            ),
            fixture(
                "https://example.com/page2",
                "Second result with enough content here for the filter",
                8.0,
            ),
        ];
        let kept = rank_and_deduplicate(input, 10);
        assert_eq!(kept.len(), 1);
        // The survivor must be the higher-scored entry (page2, score 8.0).
        assert_eq!(kept[0].url, "https://example.com/page2");
    }

    #[test]
    fn rank_sorts_by_score_descending() {
        let input = vec![
            fixture(
                "https://a.com/p",
                "Content A that is long enough to pass the filter check",
                1.0,
            ),
            fixture(
                "https://b.com/p",
                "Content B that is long enough to pass the filter check",
                5.0,
            ),
            fixture(
                "https://c.com/p",
                "Content C that is long enough to pass the filter check",
                3.0,
            ),
        ];
        let kept = rank_and_deduplicate(input, 10);
        assert_eq!(kept.len(), 3);
        // Adjacent pairs must be in non-increasing score order.
        assert!(kept.windows(2).all(|pair| pair[0].score >= pair[1].score));
    }

    #[test]
    fn rank_truncates_to_max_results() {
        let mut input = Vec::with_capacity(20);
        for i in 0..20 {
            input.push(fixture(
                &format!("https://site{i}.com/page"),
                &format!("Content for site {i} that is long enough to pass the filter"),
                i as f64,
            ));
        }
        assert_eq!(rank_and_deduplicate(input, 5).len(), 5);
    }

    #[test]
    fn rank_empty_input_returns_empty() {
        assert!(rank_and_deduplicate(vec![], 10).is_empty());
    }

    #[test]
    fn rank_all_filtered_returns_empty() {
        let input = vec![
            fixture("https://a.com", "", 10.0),
            fixture("https://b.com", "too short", 5.0),
        ];
        assert!(rank_and_deduplicate(input, 10).is_empty());
    }
}