All checks were successful
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 28s
CI / test-go-edu-search (push) Successful in 27s
CI / test-python-klausur (push) Successful in 1m45s
CI / test-python-agent-core (push) Successful in 16s
CI / test-nodejs-website (push) Successful in 21s
- edu-search-service von breakpilot-pwa nach breakpilot-lehrer kopiert (ohne vendor) - opensearch + edu-search-service in docker-compose.yml hinzugefuegt - voice-service aus docker-compose.yml entfernt (jetzt in breakpilot-core) - geo-service aus docker-compose.yml entfernt (nicht mehr benoetigt) - CI/CD: edu-search-service zu Gitea Actions und Woodpecker hinzugefuegt (Go lint, test mit go mod download, build, SBOM) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
640 lines
16 KiB
Go
640 lines
16 KiB
Go
package crawler
|
|
|
|
import (
	"context"
	"net/http"
	"net/http/httptest"
	"os"
	"path/filepath"
	"strings"
	"sync/atomic"
	"testing"
	"time"
)
|
|
|
|
func TestNewCrawler(t *testing.T) {
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
|
|
if crawler == nil {
|
|
t.Fatal("Expected non-nil crawler")
|
|
}
|
|
if crawler.userAgent != "TestBot/1.0" {
|
|
t.Errorf("Expected userAgent 'TestBot/1.0', got %q", crawler.userAgent)
|
|
}
|
|
if crawler.rateLimitPerSec != 1.0 {
|
|
t.Errorf("Expected rateLimitPerSec 1.0, got %f", crawler.rateLimitPerSec)
|
|
}
|
|
if crawler.maxDepth != 3 {
|
|
t.Errorf("Expected maxDepth 3, got %d", crawler.maxDepth)
|
|
}
|
|
if crawler.client == nil {
|
|
t.Error("Expected non-nil HTTP client")
|
|
}
|
|
}
|
|
|
|
func TestCrawler_LoadSeeds(t *testing.T) {
|
|
// Create temp directory with seed files
|
|
dir := t.TempDir()
|
|
|
|
// Create a seed file
|
|
seedContent := `# Federal education sources
|
|
https://www.kmk.org
|
|
https://www.bildungsserver.de
|
|
|
|
# Comment line
|
|
https://www.bpb.de # with inline comment
|
|
`
|
|
if err := os.WriteFile(filepath.Join(dir, "federal.txt"), []byte(seedContent), 0644); err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
// Create another seed file
|
|
stateContent := `https://www.km.bayern.de
|
|
https://www.schulministerium.nrw.de
|
|
`
|
|
if err := os.WriteFile(filepath.Join(dir, "states.txt"), []byte(stateContent), 0644); err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
// Create denylist
|
|
denylistContent := `# Denylist
|
|
facebook.com
|
|
twitter.com
|
|
instagram.com
|
|
`
|
|
if err := os.WriteFile(filepath.Join(dir, "denylist.txt"), []byte(denylistContent), 0644); err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
seeds, err := crawler.LoadSeeds(dir)
|
|
if err != nil {
|
|
t.Fatalf("LoadSeeds failed: %v", err)
|
|
}
|
|
|
|
// Check seeds loaded
|
|
if len(seeds) != 5 {
|
|
t.Errorf("Expected 5 seeds, got %d", len(seeds))
|
|
}
|
|
|
|
// Check expected URLs
|
|
expectedURLs := []string{
|
|
"https://www.kmk.org",
|
|
"https://www.bildungsserver.de",
|
|
"https://www.bpb.de",
|
|
"https://www.km.bayern.de",
|
|
"https://www.schulministerium.nrw.de",
|
|
}
|
|
|
|
for _, expected := range expectedURLs {
|
|
found := false
|
|
for _, seed := range seeds {
|
|
if seed == expected {
|
|
found = true
|
|
break
|
|
}
|
|
}
|
|
if !found {
|
|
t.Errorf("Expected seed %q not found", expected)
|
|
}
|
|
}
|
|
|
|
// Check denylist loaded
|
|
if len(crawler.denylist) != 3 {
|
|
t.Errorf("Expected 3 denylist entries, got %d", len(crawler.denylist))
|
|
}
|
|
}
|
|
|
|
func TestCrawler_IsDenied(t *testing.T) {
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
crawler.denylist = map[string]bool{
|
|
"facebook.com": true,
|
|
"twitter.com": true,
|
|
"ads.example.com": true,
|
|
}
|
|
|
|
tests := []struct {
|
|
name string
|
|
url string
|
|
expected bool
|
|
}{
|
|
{
|
|
name: "Exact domain match",
|
|
url: "https://facebook.com/page",
|
|
expected: true,
|
|
},
|
|
{
|
|
name: "Subdomain of denied domain",
|
|
url: "https://www.facebook.com/page",
|
|
expected: true,
|
|
},
|
|
{
|
|
name: "Allowed domain",
|
|
url: "https://www.kmk.org/bildung",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "Denied subdomain",
|
|
url: "https://ads.example.com/banner",
|
|
expected: true,
|
|
},
|
|
{
|
|
name: "Parent domain allowed",
|
|
url: "https://example.com/page",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "Invalid URL scheme",
|
|
url: "://invalid",
|
|
expected: true,
|
|
},
|
|
}
|
|
|
|
for _, tt := range tests {
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
result := crawler.IsDenied(tt.url)
|
|
if result != tt.expected {
|
|
t.Errorf("IsDenied(%q) = %v, expected %v", tt.url, result, tt.expected)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_Success(t *testing.T) {
|
|
// Create test server
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
// Check user agent
|
|
if r.Header.Get("User-Agent") != "TestBot/1.0" {
|
|
t.Errorf("Expected User-Agent 'TestBot/1.0', got %q", r.Header.Get("User-Agent"))
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
|
w.WriteHeader(http.StatusOK)
|
|
w.Write([]byte("<html><body>Test content</body></html>"))
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3) // High rate limit for testing
|
|
ctx := context.Background()
|
|
|
|
result, err := crawler.Fetch(ctx, server.URL+"/page")
|
|
if err != nil {
|
|
t.Fatalf("Fetch failed: %v", err)
|
|
}
|
|
|
|
if result.StatusCode != 200 {
|
|
t.Errorf("Expected status 200, got %d", result.StatusCode)
|
|
}
|
|
if result.Error != nil {
|
|
t.Errorf("Expected no error, got %v", result.Error)
|
|
}
|
|
if !strings.Contains(result.ContentType, "text/html") {
|
|
t.Errorf("Expected Content-Type to contain 'text/html', got %q", result.ContentType)
|
|
}
|
|
if len(result.Body) == 0 {
|
|
t.Error("Expected non-empty body")
|
|
}
|
|
if result.ContentHash == "" {
|
|
t.Error("Expected non-empty content hash")
|
|
}
|
|
if result.FetchTime.IsZero() {
|
|
t.Error("Expected non-zero fetch time")
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_DeniedDomain(t *testing.T) {
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3)
|
|
crawler.denylist = map[string]bool{
|
|
"denied.com": true,
|
|
}
|
|
|
|
ctx := context.Background()
|
|
result, err := crawler.Fetch(ctx, "https://denied.com/page")
|
|
|
|
if err == nil {
|
|
t.Error("Expected error for denied domain")
|
|
}
|
|
if result.Error == nil {
|
|
t.Error("Expected error in result")
|
|
}
|
|
if !strings.Contains(result.Error.Error(), "denied") {
|
|
t.Errorf("Expected 'denied' in error message, got %v", result.Error)
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_HTTPError(t *testing.T) {
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
w.WriteHeader(http.StatusNotFound)
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3)
|
|
ctx := context.Background()
|
|
|
|
result, err := crawler.Fetch(ctx, server.URL+"/notfound")
|
|
if err == nil {
|
|
t.Error("Expected error for 404 response")
|
|
}
|
|
if result.StatusCode != 404 {
|
|
t.Errorf("Expected status 404, got %d", result.StatusCode)
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_Redirect(t *testing.T) {
|
|
redirectCount := 0
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
if r.URL.Path == "/redirect" {
|
|
redirectCount++
|
|
http.Redirect(w, r, "/final", http.StatusFound)
|
|
return
|
|
}
|
|
w.WriteHeader(http.StatusOK)
|
|
w.Write([]byte("Final content"))
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3)
|
|
ctx := context.Background()
|
|
|
|
result, err := crawler.Fetch(ctx, server.URL+"/redirect")
|
|
if err != nil {
|
|
t.Fatalf("Fetch failed: %v", err)
|
|
}
|
|
|
|
// CanonicalURL should be the final URL after redirect
|
|
if !strings.HasSuffix(result.CanonicalURL, "/final") {
|
|
t.Errorf("Expected canonical URL to end with '/final', got %q", result.CanonicalURL)
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_Timeout(t *testing.T) {
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
time.Sleep(2 * time.Second) // Delay response
|
|
w.WriteHeader(http.StatusOK)
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3)
|
|
crawler.timeout = 100 * time.Millisecond // Very short timeout
|
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
|
|
defer cancel()
|
|
|
|
_, err := crawler.Fetch(ctx, server.URL+"/slow")
|
|
if err == nil {
|
|
t.Error("Expected timeout error")
|
|
}
|
|
}
|
|
|
|
func TestExtractDomain(t *testing.T) {
|
|
tests := []struct {
|
|
url string
|
|
expected string
|
|
}{
|
|
{
|
|
url: "https://www.example.com/page",
|
|
expected: "www.example.com",
|
|
},
|
|
{
|
|
url: "https://example.com:8080/path",
|
|
expected: "example.com:8080",
|
|
},
|
|
{
|
|
url: "http://subdomain.example.com",
|
|
expected: "subdomain.example.com",
|
|
},
|
|
{
|
|
url: "invalid-url",
|
|
expected: "",
|
|
},
|
|
}
|
|
|
|
for _, tt := range tests {
|
|
t.Run(tt.url, func(t *testing.T) {
|
|
result := ExtractDomain(tt.url)
|
|
if result != tt.expected {
|
|
t.Errorf("ExtractDomain(%q) = %q, expected %q", tt.url, result, tt.expected)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestGenerateDocID(t *testing.T) {
|
|
id1 := GenerateDocID()
|
|
id2 := GenerateDocID()
|
|
|
|
if id1 == "" {
|
|
t.Error("Expected non-empty ID")
|
|
}
|
|
if id1 == id2 {
|
|
t.Error("Expected unique IDs")
|
|
}
|
|
// UUID format check (basic)
|
|
if len(id1) != 36 {
|
|
t.Errorf("Expected UUID length 36, got %d", len(id1))
|
|
}
|
|
}
|
|
|
|
func TestNormalizeURL(t *testing.T) {
|
|
tests := []struct {
|
|
name string
|
|
url string
|
|
expected string
|
|
}{
|
|
{
|
|
name: "Remove trailing slash",
|
|
url: "https://example.com/page/",
|
|
expected: "https://example.com/page",
|
|
},
|
|
{
|
|
name: "Remove UTM parameters",
|
|
url: "https://example.com/page?utm_source=google&utm_medium=cpc",
|
|
expected: "https://example.com/page",
|
|
},
|
|
{
|
|
name: "Remove multiple tracking params",
|
|
url: "https://example.com/page?id=123&utm_campaign=test&fbclid=abc",
|
|
expected: "https://example.com/page?id=123",
|
|
},
|
|
{
|
|
name: "Keep non-tracking params",
|
|
url: "https://example.com/search?q=test&page=2",
|
|
expected: "https://example.com/search?page=2&q=test",
|
|
},
|
|
{
|
|
name: "Lowercase host",
|
|
url: "https://EXAMPLE.COM/Page",
|
|
expected: "https://example.com/Page",
|
|
},
|
|
{
|
|
name: "Invalid URL returns as-is",
|
|
url: "not-a-url",
|
|
expected: "not-a-url",
|
|
},
|
|
}
|
|
|
|
for _, tt := range tests {
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
result := NormalizeURL(tt.url)
|
|
if result != tt.expected {
|
|
t.Errorf("NormalizeURL(%q) = %q, expected %q", tt.url, result, tt.expected)
|
|
}
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestCrawler_RateLimit(t *testing.T) {
|
|
requestCount := 0
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
requestCount++
|
|
w.WriteHeader(http.StatusOK)
|
|
w.Write([]byte("OK"))
|
|
}))
|
|
defer server.Close()
|
|
|
|
// 2 requests per second = 500ms between requests
|
|
crawler := NewCrawler("TestBot/1.0", 2.0, 3)
|
|
ctx := context.Background()
|
|
|
|
start := time.Now()
|
|
|
|
// Make 3 requests
|
|
for i := 0; i < 3; i++ {
|
|
crawler.Fetch(ctx, server.URL+"/page")
|
|
}
|
|
|
|
elapsed := time.Since(start)
|
|
|
|
// With 2 req/sec, 3 requests should take at least 1 second (2 intervals)
|
|
if elapsed < 800*time.Millisecond {
|
|
t.Errorf("Rate limiting not working: 3 requests took only %v", elapsed)
|
|
}
|
|
}
|
|
|
|
func TestLoadSeedFile_EmptyLines(t *testing.T) {
|
|
dir := t.TempDir()
|
|
|
|
content := `
|
|
|
|
https://example.com
|
|
|
|
# comment
|
|
|
|
https://example.org
|
|
|
|
`
|
|
if err := os.WriteFile(filepath.Join(dir, "seeds.txt"), []byte(content), 0644); err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
seeds, err := crawler.LoadSeeds(dir)
|
|
if err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
if len(seeds) != 2 {
|
|
t.Errorf("Expected 2 seeds (ignoring empty lines and comments), got %d", len(seeds))
|
|
}
|
|
}
|
|
|
|
func TestCrawler_Fetch_LargeBody(t *testing.T) {
|
|
// Create a large response (but under the limit)
|
|
largeBody := strings.Repeat("A", 1024*1024) // 1MB
|
|
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
w.Header().Set("Content-Type", "text/plain")
|
|
w.WriteHeader(http.StatusOK)
|
|
w.Write([]byte(largeBody))
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 100.0, 3)
|
|
ctx := context.Background()
|
|
|
|
result, err := crawler.Fetch(ctx, server.URL+"/large")
|
|
if err != nil {
|
|
t.Fatalf("Fetch failed: %v", err)
|
|
}
|
|
|
|
if len(result.Body) != len(largeBody) {
|
|
t.Errorf("Expected body length %d, got %d", len(largeBody), len(result.Body))
|
|
}
|
|
}
|
|
|
|
// Tests for API Integration (new functionality)
|
|
|
|
func TestCrawler_SetAPIClient(t *testing.T) {
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
|
|
if crawler.apiClient != nil {
|
|
t.Error("Expected nil apiClient initially")
|
|
}
|
|
|
|
crawler.SetAPIClient("http://backend:8000")
|
|
|
|
if crawler.apiClient == nil {
|
|
t.Error("Expected non-nil apiClient after SetAPIClient")
|
|
}
|
|
}
|
|
|
|
func TestCrawler_LoadSeedsFromAPI_NotInitialized(t *testing.T) {
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 3)
|
|
ctx := context.Background()
|
|
|
|
_, err := crawler.LoadSeedsFromAPI(ctx)
|
|
|
|
if err == nil {
|
|
t.Error("Expected error when API client not initialized")
|
|
}
|
|
}
|
|
|
|
func TestCrawler_LoadSeedsFromAPI_Success(t *testing.T) {
|
|
// Create mock server
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
w.Write([]byte(`{
|
|
"seeds": [
|
|
{"url": "https://www.kmk.org", "trust": 0.8, "source": "GOV", "scope": "FEDERAL", "state": "", "depth": 3, "category": "federal"},
|
|
{"url": "https://www.km-bw.de", "trust": 0.7, "source": "GOV", "scope": "STATE", "state": "BW", "depth": 2, "category": "states"}
|
|
],
|
|
"total": 2,
|
|
"exported_at": "2025-01-17T10:00:00Z"
|
|
}`))
|
|
}))
|
|
defer server.Close()
|
|
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, 4)
|
|
crawler.SetAPIClient(server.URL)
|
|
ctx := context.Background()
|
|
|
|
seeds, err := crawler.LoadSeedsFromAPI(ctx)
|
|
|
|
if err != nil {
|
|
t.Fatalf("Unexpected error: %v", err)
|
|
}
|
|
|
|
if len(seeds) != 2 {
|
|
t.Fatalf("Expected 2 seeds, got %d", len(seeds))
|
|
}
|
|
|
|
// Check first seed
|
|
if seeds[0].URL != "https://www.kmk.org" {
|
|
t.Errorf("Expected URL 'https://www.kmk.org', got '%s'", seeds[0].URL)
|
|
}
|
|
if seeds[0].TrustBoost != 0.8 {
|
|
t.Errorf("Expected TrustBoost 0.8, got %f", seeds[0].TrustBoost)
|
|
}
|
|
if seeds[0].Source != "GOV" {
|
|
t.Errorf("Expected Source 'GOV', got '%s'", seeds[0].Source)
|
|
}
|
|
if seeds[0].MaxDepth != 3 {
|
|
t.Errorf("Expected MaxDepth 3, got %d", seeds[0].MaxDepth)
|
|
}
|
|
|
|
// Check second seed with state
|
|
if seeds[1].State != "BW" {
|
|
t.Errorf("Expected State 'BW', got '%s'", seeds[1].State)
|
|
}
|
|
if seeds[1].Category != "states" {
|
|
t.Errorf("Expected Category 'states', got '%s'", seeds[1].Category)
|
|
}
|
|
}
|
|
|
|
func TestCrawler_LoadSeedsFromAPI_DefaultDepth(t *testing.T) {
|
|
// Create mock server with seed that has no depth
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
w.Write([]byte(`{
|
|
"seeds": [
|
|
{"url": "https://www.example.com", "trust": 0.5, "source": "EDU", "scope": "FEDERAL", "state": "", "depth": 0, "category": "edu"}
|
|
],
|
|
"total": 1,
|
|
"exported_at": "2025-01-17T10:00:00Z"
|
|
}`))
|
|
}))
|
|
defer server.Close()
|
|
|
|
defaultDepth := 5
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, defaultDepth)
|
|
crawler.SetAPIClient(server.URL)
|
|
ctx := context.Background()
|
|
|
|
seeds, err := crawler.LoadSeedsFromAPI(ctx)
|
|
|
|
if err != nil {
|
|
t.Fatalf("Unexpected error: %v", err)
|
|
}
|
|
|
|
// When depth is 0 or not specified, it should use crawler's default
|
|
if seeds[0].MaxDepth != defaultDepth {
|
|
t.Errorf("Expected default MaxDepth %d, got %d", defaultDepth, seeds[0].MaxDepth)
|
|
}
|
|
}
|
|
|
|
func TestCrawler_LoadSeedsWithMetadata(t *testing.T) {
|
|
dir := t.TempDir()
|
|
|
|
seedContent := `https://www.kmk.org
|
|
https://www.bildungsserver.de`
|
|
|
|
if err := os.WriteFile(filepath.Join(dir, "seeds.txt"), []byte(seedContent), 0644); err != nil {
|
|
t.Fatal(err)
|
|
}
|
|
|
|
defaultDepth := 4
|
|
crawler := NewCrawler("TestBot/1.0", 1.0, defaultDepth)
|
|
seeds, err := crawler.LoadSeedsWithMetadata(dir)
|
|
|
|
if err != nil {
|
|
t.Fatalf("LoadSeedsWithMetadata failed: %v", err)
|
|
}
|
|
|
|
if len(seeds) != 2 {
|
|
t.Fatalf("Expected 2 seeds, got %d", len(seeds))
|
|
}
|
|
|
|
// Check default values
|
|
for _, seed := range seeds {
|
|
if seed.TrustBoost != 0.5 {
|
|
t.Errorf("Expected default TrustBoost 0.5, got %f", seed.TrustBoost)
|
|
}
|
|
if seed.MaxDepth != defaultDepth {
|
|
t.Errorf("Expected default MaxDepth %d, got %d", defaultDepth, seed.MaxDepth)
|
|
}
|
|
}
|
|
}
|
|
|
|
func TestSeed_Struct(t *testing.T) {
|
|
seed := Seed{
|
|
URL: "https://www.example.com",
|
|
TrustBoost: 0.75,
|
|
Source: "GOV",
|
|
Scope: "STATE",
|
|
State: "BY",
|
|
MaxDepth: 3,
|
|
Category: "states",
|
|
}
|
|
|
|
if seed.URL != "https://www.example.com" {
|
|
t.Errorf("URL mismatch")
|
|
}
|
|
if seed.TrustBoost != 0.75 {
|
|
t.Errorf("TrustBoost mismatch")
|
|
}
|
|
if seed.Source != "GOV" {
|
|
t.Errorf("Source mismatch")
|
|
}
|
|
if seed.Scope != "STATE" {
|
|
t.Errorf("Scope mismatch")
|
|
}
|
|
if seed.State != "BY" {
|
|
t.Errorf("State mismatch")
|
|
}
|
|
if seed.MaxDepth != 3 {
|
|
t.Errorf("MaxDepth mismatch")
|
|
}
|
|
if seed.Category != "states" {
|
|
t.Errorf("Category mismatch")
|
|
}
|
|
}
|