diff --git a/cmd/cli/search/backend_resolution.go b/cmd/cli/search/backend_resolution.go new file mode 100644 index 000000000..2d88e2253 --- /dev/null +++ b/cmd/cli/search/backend_resolution.go @@ -0,0 +1,224 @@ +package search + +import ( + "context" + "errors" + "strings" + + "github.com/docker/model-runner/pkg/distribution/files" + distributionhf "github.com/docker/model-runner/pkg/distribution/huggingface" + "github.com/docker/model-runner/pkg/distribution/oci" + "github.com/docker/model-runner/pkg/distribution/registry" + disttypes "github.com/docker/model-runner/pkg/distribution/types" + "golang.org/x/sync/errgroup" +) + +const ( + backendUnknown = "unknown" + + backendLlamaCpp = "llama.cpp" + backendVLLM = "vllm" + backendDiffusers = "diffusers" + + defaultBackendResolveConcurrency = 4 +) + +type backendResolver interface { + Resolve(ctx context.Context, target string) (string, error) +} + +type registryBackendResolver struct { + lookup func(ctx context.Context, reference string) (disttypes.ModelArtifact, error) +} + +func newRegistryBackendResolver() *registryBackendResolver { + client := registry.NewClient() + return ®istryBackendResolver{ + lookup: client.Model, + } +} + +func (r *registryBackendResolver) Resolve(ctx context.Context, target string) (string, error) { + model, err := r.lookup(ctx, withDefaultTag(target)) + if err != nil { + return backendUnknown, err + } + + config, configErr := model.Config() + if configErr == nil { + if backend := backendFromFormat(config.GetFormat()); backend != backendUnknown { + return backend, nil + } + } + + manifest, manifestErr := model.Manifest() + if manifestErr != nil { + if configErr != nil { + return backendUnknown, errors.Join(configErr, manifestErr) + } + return backendUnknown, manifestErr + } + + if backend := backendFromManifestLayers(manifest); backend != backendUnknown { + return backend, nil + } + + if configErr != nil { + return backendUnknown, configErr + } + + return backendUnknown, nil +} + +type 
huggingFaceRepoBackendResolver struct { + listFiles func(ctx context.Context, repo, revision string) ([]distributionhf.RepoFile, error) +} + +func newHuggingFaceRepoBackendResolver() *huggingFaceRepoBackendResolver { + client := distributionhf.NewClient(distributionhf.WithUserAgent(registry.DefaultUserAgent)) + return &huggingFaceRepoBackendResolver{ + listFiles: client.ListFiles, + } +} + +func (r *huggingFaceRepoBackendResolver) Resolve(ctx context.Context, target string) (string, error) { + repoFiles, err := r.listFiles(ctx, target, "main") + if err != nil { + return backendUnknown, err + } + return backendFromRepoFiles(repoFiles), nil +} + +func backendFromFormat(format disttypes.Format) string { + switch format { + case disttypes.FormatGGUF: + return backendLlamaCpp + case disttypes.FormatSafetensors: + return backendVLLM + case disttypes.FormatDiffusers: + return backendDiffusers + default: + return backendUnknown + } +} + +func backendFromManifestLayers(manifest *oci.Manifest) string { + if manifest == nil { + return backendUnknown + } + + var backends []string + for _, layer := range manifest.Layers { + //nolint:exhaustive // only backend-relevant media types affect search classification + switch layer.MediaType { + case disttypes.MediaTypeGGUF: + backends = append(backends, backendLlamaCpp) + case disttypes.MediaTypeSafetensors: + backends = append(backends, backendVLLM) + case disttypes.MediaTypeDDUF: + backends = append(backends, backendDiffusers) + default: + continue + } + } + + return joinBackends(backends...) 
+} + +func backendFromRepoFiles(repoFiles []distributionhf.RepoFile) string { + var backends []string + for _, repoFile := range repoFiles { + if repoFile.Type != "file" { + continue + } + + //nolint:exhaustive // only model weight file types affect search classification + switch files.Classify(repoFile.Filename()) { + case files.FileTypeGGUF: + backends = append(backends, backendLlamaCpp) + case files.FileTypeSafetensors: + backends = append(backends, backendVLLM) + case files.FileTypeDDUF: + backends = append(backends, backendDiffusers) + default: + continue + } + } + + return joinBackends(backends...) +} + +func resolveSearchResultBackends( + ctx context.Context, + results []SearchResult, + resolveConcurrency int, + resolve func(context.Context, SearchResult) (string, error), +) []SearchResult { + if len(results) == 0 { + return results + } + + if resolveConcurrency <= 0 { + resolveConcurrency = defaultBackendResolveConcurrency + } + + resolved := append([]SearchResult(nil), results...) 
+ group, workerCtx := errgroup.WithContext(ctx) + group.SetLimit(resolveConcurrency) + + for i := range resolved { + group.Go(func() error { + backend, err := resolve(workerCtx, resolved[i]) + if err != nil || backend == "" { + resolved[i].Backend = backendUnknown + return nil + } + resolved[i].Backend = backend + return nil + }) + } + + _ = group.Wait() + return resolved +} + +func joinBackends(backends ...string) string { + seen := map[string]bool{} + for _, backend := range backends { + if backend == "" || backend == backendUnknown { + continue + } + seen[backend] = true + } + + ordered := []string{ + backendLlamaCpp, + backendVLLM, + backendDiffusers, + } + + var unique []string + for _, backend := range ordered { + if seen[backend] { + unique = append(unique, backend) + } + } + + if len(unique) == 0 { + return backendUnknown + } + + return strings.Join(unique, ", ") +} + +func withDefaultTag(reference string) string { + lastSlash := strings.LastIndex(reference, "/") + lastColon := strings.LastIndex(reference, ":") + lastDigest := strings.LastIndex(reference, "@") + + if lastColon > lastSlash || lastDigest > lastSlash { + return reference + } + + return reference + ":latest" +} diff --git a/cmd/cli/search/backend_resolution_test.go b/cmd/cli/search/backend_resolution_test.go new file mode 100644 index 000000000..be203cfd0 --- /dev/null +++ b/cmd/cli/search/backend_resolution_test.go @@ -0,0 +1,202 @@ +package search + +import ( + "context" + "fmt" + "testing" + + distributionhf "github.com/docker/model-runner/pkg/distribution/huggingface" + "github.com/docker/model-runner/pkg/distribution/oci" + disttypes "github.com/docker/model-runner/pkg/distribution/types" +) + +func TestBackendFromFormat(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + format disttypes.Format + want string + }{ + {name: "gguf", format: disttypes.FormatGGUF, want: backendLlamaCpp}, + {name: "safetensors", format: disttypes.FormatSafetensors, want: backendVLLM}, + 
{name: "diffusers", format: disttypes.FormatDiffusers, want: backendDiffusers}, + {name: "unknown", format: "", want: backendUnknown}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := backendFromFormat(tt.format); got != tt.want { + t.Fatalf("backendFromFormat(%q) = %q, want %q", tt.format, got, tt.want) + } + }) + } +} + +func TestBackendFromManifestLayers(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + manifest *oci.Manifest + want string + }{ + { + name: "gguf layer", + manifest: &oci.Manifest{ + Layers: []oci.Descriptor{{MediaType: disttypes.MediaTypeGGUF}}, + }, + want: backendLlamaCpp, + }, + { + name: "dduf layer", + manifest: &oci.Manifest{ + Layers: []oci.Descriptor{{MediaType: disttypes.MediaTypeDDUF}}, + }, + want: backendDiffusers, + }, + { + name: "multiple verified layer types", + manifest: &oci.Manifest{ + Layers: []oci.Descriptor{ + {MediaType: disttypes.MediaTypeSafetensors}, + {MediaType: disttypes.MediaTypeGGUF}, + }, + }, + want: "llama.cpp, vllm", + }, + { + name: "no recognized layers", + manifest: &oci.Manifest{ + Layers: []oci.Descriptor{{MediaType: disttypes.MediaTypeModelFile}}, + }, + want: backendUnknown, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := backendFromManifestLayers(tt.manifest); got != tt.want { + t.Fatalf("backendFromManifestLayers() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestBackendFromRepoFiles(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + repoFiles []distributionhf.RepoFile + want string + }{ + { + name: "gguf and safetensors repo", + repoFiles: []distributionhf.RepoFile{ + {Type: "file", Path: "model.gguf"}, + {Type: "file", Path: "weights/model.safetensors"}, + }, + want: "llama.cpp, vllm", + }, + { + name: "dduf repo", + repoFiles: []distributionhf.RepoFile{ + {Type: "file", Path: "sdxl.dduf"}, + }, + want: backendDiffusers, + }, + { + name: "no 
recognized files", + repoFiles: []distributionhf.RepoFile{ + {Type: "file", Path: "README.md"}, + }, + want: backendUnknown, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := backendFromRepoFiles(tt.repoFiles); got != tt.want { + t.Fatalf("backendFromRepoFiles() = %q, want %q", got, tt.want) + } + }) + } +} + +func TestWithDefaultTag(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + reference string + want string + }{ + {name: "adds latest", reference: "ai/stable-diffusion", want: "ai/stable-diffusion:latest"}, + {name: "keeps existing tag", reference: "ai/stable-diffusion:Q4", want: "ai/stable-diffusion:Q4"}, + {name: "keeps digest", reference: "ai/stable-diffusion@sha256:deadbeef", want: "ai/stable-diffusion@sha256:deadbeef"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if got := withDefaultTag(tt.reference); got != tt.want { + t.Fatalf("withDefaultTag(%q) = %q, want %q", tt.reference, got, tt.want) + } + }) + } +} + +func TestResolveSearchResultBackendsConcurrent(t *testing.T) { + t.Parallel() + + const numResults = 20 + + results := make([]SearchResult, numResults) + for i := range results { + results[i] = SearchResult{ + Name: fmt.Sprintf("model-%d", i), + Source: "test", + } + } + + wantBackends := make([]string, numResults) + for i := range wantBackends { + switch i % 3 { + case 0: + wantBackends[i] = backendLlamaCpp + case 1: + wantBackends[i] = backendVLLM + case 2: + wantBackends[i] = backendDiffusers + } + } + + resolve := func(_ context.Context, result SearchResult) (string, error) { + for i, r := range results { + if r.Name == result.Name { + return wantBackends[i], nil + } + } + return backendUnknown, nil + } + + resolved := resolveSearchResultBackends(t.Context(), results, numResults, resolve) + + if len(resolved) != numResults { + t.Fatalf("expected %d results, got %d", numResults, len(resolved)) + } + + for i, r := range 
resolved { + if r.Backend != wantBackends[i] { + t.Errorf("result[%d] (%s): Backend = %q, want %q", i, r.Name, r.Backend, wantBackends[i]) + } + } +} diff --git a/cmd/cli/search/dockerhub.go b/cmd/cli/search/dockerhub.go index e613a083b..e9ca14112 100644 --- a/cmd/cli/search/dockerhub.go +++ b/cmd/cli/search/dockerhub.go @@ -16,15 +16,19 @@ const ( // DockerHubClient searches for models on Docker Hub type DockerHubClient struct { - httpClient *http.Client - baseURL string + httpClient *http.Client + baseURL string + backendResolver backendResolver + resolveConcurrency int } // NewDockerHubClient creates a new Docker Hub search client func NewDockerHubClient() *DockerHubClient { return &DockerHubClient{ - httpClient: NewHTTPClient(), - baseURL: dockerHubBaseURL, + httpClient: NewHTTPClient(), + baseURL: dockerHubBaseURL, + backendResolver: newRegistryBackendResolver(), + resolveConcurrency: defaultBackendResolveConcurrency, } } @@ -129,9 +133,6 @@ func (c *DockerHubClient) Search(ctx context.Context, opts SearchOptions) ([]Sea } } - // Determine backend type from name and description - backend := determineDockerHubBackend(repo.Name, repo.Description) - results = append(results, SearchResult{ Name: fmt.Sprintf("%s/%s", repo.Namespace, repo.Name), Description: truncateString(repo.Description, 50), @@ -140,7 +141,7 @@ func (c *DockerHubClient) Search(ctx context.Context, opts SearchOptions) ([]Sea Source: DockerHubSourceName, Official: repo.Namespace == dockerHubAIOrg, UpdatedAt: repo.LastUpdated, - Backend: backend, + Backend: backendUnknown, }) if len(results) >= limit { @@ -155,7 +156,12 @@ func (c *DockerHubClient) Search(ctx context.Context, opts SearchOptions) ([]Sea nextURL = response.Next } - return results, nil + return resolveSearchResultBackends(ctx, results, c.resolveConcurrency, func(ctx context.Context, result SearchResult) (string, error) { + if c.backendResolver == nil { + return backendUnknown, nil + } + return c.backendResolver.Resolve(ctx, 
result.Name) + }), nil } // truncateString truncates a string to maxLen characters, adding "..." if truncated @@ -168,36 +174,3 @@ func truncateString(s string, maxLen int) string { } return s[:maxLen-3] + "..." } - -// determineDockerHubBackend determines the backend type from model name and description -func determineDockerHubBackend(name, description string) string { - nameLower := strings.ToLower(name) - descLower := strings.ToLower(description) - combined := nameLower + " " + descLower - - var hasVLLM, hasLlamaCpp bool - - // Check for vLLM indicators - if strings.Contains(combined, "vllm") || strings.Contains(combined, "safetensors") { - hasVLLM = true - } - - // Check for llama.cpp indicators (gguf is the format used by llama.cpp) - if strings.Contains(combined, "llama.cpp") || strings.Contains(combined, "llamacpp") || - strings.Contains(combined, "gguf") || strings.Contains(combined, "llama-cpp") { - hasLlamaCpp = true - } - - if hasVLLM && hasLlamaCpp { - return "llama.cpp, vllm" - } - if hasVLLM { - return "vllm" - } - if hasLlamaCpp { - return "llama.cpp" - } - - // Default to llama.cpp for ai/ namespace models as they primarily use GGUF format - return "llama.cpp" -} diff --git a/cmd/cli/search/dockerhub_test.go b/cmd/cli/search/dockerhub_test.go new file mode 100644 index 000000000..462548b26 --- /dev/null +++ b/cmd/cli/search/dockerhub_test.go @@ -0,0 +1,112 @@ +package search + +import ( + "context" + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "testing" +) + +type fakeBackendResolver struct { + backends map[string]string + errs map[string]error +} + +func (f fakeBackendResolver) Resolve(_ context.Context, target string) (string, error) { + if err, ok := f.errs[target]; ok { + return backendUnknown, err + } + if backend, ok := f.backends[target]; ok { + return backend, nil + } + return backendUnknown, nil +} + +func TestDockerHubSearchUsesVerifiedBackend(t *testing.T) { + t.Parallel() + + server := newDockerHubSearchServer(t, 
dockerHubRepoListResponse{ + Results: []dockerHubRepo{ + { + Name: "stable-diffusion", + Namespace: "ai", + Description: "Image generation model, uses a base latent diffusion model plus a refiner.", + StarCount: 3, + PullCount: 18900, + LastUpdated: "2026-01-26T11:32:37.220001Z", + }, + }, + }) + defer server.Close() + + client := &DockerHubClient{ + httpClient: server.Client(), + baseURL: server.URL, + backendResolver: fakeBackendResolver{backends: map[string]string{"ai/stable-diffusion": backendDiffusers}}, + resolveConcurrency: 1, + } + + results, err := client.Search(t.Context(), SearchOptions{Query: "stable-diffusion", Limit: 10}) + if err != nil { + t.Fatalf("Search() error = %v", err) + } + + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Backend != backendDiffusers { + t.Fatalf("Backend = %q, want %q", results[0].Backend, backendDiffusers) + } +} + +func TestDockerHubSearchUsesUnknownWhenVerificationFails(t *testing.T) { + t.Parallel() + + server := newDockerHubSearchServer(t, dockerHubRepoListResponse{ + Results: []dockerHubRepo{ + { + Name: "stable-diffusion", + Namespace: "ai", + Description: "Image generation model", + StarCount: 3, + PullCount: 18900, + LastUpdated: "2026-01-26T11:32:37.220001Z", + }, + }, + }) + defer server.Close() + + client := &DockerHubClient{ + httpClient: server.Client(), + baseURL: server.URL, + backendResolver: fakeBackendResolver{errs: map[string]error{"ai/stable-diffusion": errors.New("lookup failed")}}, + resolveConcurrency: 1, + } + + results, err := client.Search(t.Context(), SearchOptions{Query: "stable-diffusion", Limit: 10}) + if err != nil { + t.Fatalf("Search() error = %v", err) + } + + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Backend != backendUnknown { + t.Fatalf("Backend = %q, want %q", results[0].Backend, backendUnknown) + } +} + +func newDockerHubSearchServer(t *testing.T, response dockerHubRepoListResponse) 
*httptest.Server { + t.Helper() + + mux := http.NewServeMux() + mux.HandleFunc("/v2/repositories/ai/", func(w http.ResponseWriter, r *http.Request) { + if err := json.NewEncoder(w).Encode(response); err != nil { + t.Errorf("encode response: %v", err) + } + }) + + return httptest.NewServer(mux) +} diff --git a/cmd/cli/search/huggingface.go b/cmd/cli/search/huggingface.go index 174b692d3..a1a44d47f 100644 --- a/cmd/cli/search/huggingface.go +++ b/cmd/cli/search/huggingface.go @@ -15,15 +15,19 @@ const ( // HuggingFaceClient searches for models on HuggingFace Hub type HuggingFaceClient struct { - httpClient *http.Client - baseURL string + httpClient *http.Client + baseURL string + backendResolver backendResolver + resolveConcurrency int } // NewHuggingFaceClient creates a new HuggingFace search client func NewHuggingFaceClient() *HuggingFaceClient { return &HuggingFaceClient{ - httpClient: NewHTTPClient(), - baseURL: huggingFaceAPIURL, + httpClient: NewHTTPClient(), + baseURL: huggingFaceAPIURL, + backendResolver: newHuggingFaceRepoBackendResolver(), + resolveConcurrency: defaultBackendResolveConcurrency, } } @@ -109,9 +113,6 @@ func (c *HuggingFaceClient) Search(ctx context.Context, opts SearchOptions) ([]S // Generate description from tags description := generateDescription(model.Tags, model.PipelineTag) - // Determine backend type from tags - backend := determineBackend(model.Tags) - results = append(results, SearchResult{ Name: modelName, Description: truncateString(description, 50), @@ -120,11 +121,16 @@ func (c *HuggingFaceClient) Search(ctx context.Context, opts SearchOptions) ([]S Source: HuggingFaceSourceName, Official: false, UpdatedAt: model.CreatedAt, - Backend: backend, + Backend: backendUnknown, }) } - return results, nil + return resolveSearchResultBackends(ctx, results, c.resolveConcurrency, func(ctx context.Context, result SearchResult) (string, error) { + if c.backendResolver == nil { + return backendUnknown, nil + } + return 
c.backendResolver.Resolve(ctx, result.Name) + }), nil } // generateDescription creates a description from model tags @@ -165,42 +171,3 @@ func generateDescription(tags []string, pipelineTag string) string { } return strings.Join(parts, ", ") } - -// determineBackend determines the backend type from HuggingFace model tags. -// Since we filter by apps=vllm,llama.cpp, all results are compatible with at least one backend. -// - GGUF format models work with llama.cpp -// - Transformers/safetensors models work with vLLM -func determineBackend(tags []string) string { - var hasVLLM, hasLlamaCpp bool - - for _, tag := range tags { - tagLower := strings.ToLower(tag) - - // Check for explicit vllm tag or formats that indicate vLLM compatibility - if tagLower == "vllm" || tagLower == "text-generation-inference" { - hasVLLM = true - } - // Transformers/safetensors models are typically vLLM compatible - if tagLower == "transformers" || tagLower == "safetensors" { - hasVLLM = true - } - - // Check for llama.cpp compatibility (GGUF format) - if tagLower == "llama.cpp" || tagLower == "llama-cpp" || tagLower == "gguf" { - hasLlamaCpp = true - } - } - - if hasVLLM && hasLlamaCpp { - return "llama.cpp, vllm" - } - if hasVLLM { - return "vllm" - } - if hasLlamaCpp { - return "llama.cpp" - } - // Fallback: since we filter by apps=vllm,llama.cpp, model must be compatible with one - // but we couldn't determine which from tags - return "llama.cpp" -} diff --git a/cmd/cli/search/huggingface_test.go b/cmd/cli/search/huggingface_test.go new file mode 100644 index 000000000..e6be9934a --- /dev/null +++ b/cmd/cli/search/huggingface_test.go @@ -0,0 +1,92 @@ +package search + +import ( + "encoding/json" + "errors" + "net/http" + "net/http/httptest" + "testing" +) + +func TestHuggingFaceSearchUsesVerifiedBackend(t *testing.T) { + t.Parallel() + + server := newHuggingFaceSearchServer(t, []huggingFaceModel{ + { + ModelID: "stabilityai/stable-diffusion-xl-base-1.0", + Likes: 42, + Downloads: 1000, + 
Tags: []string{"text-to-image"}, + PipelineTag: "text-to-image", + CreatedAt: "2026-01-26T11:32:37.220001Z", + }, + }) + defer server.Close() + + client := &HuggingFaceClient{ + httpClient: server.Client(), + baseURL: server.URL + "/api", + backendResolver: fakeBackendResolver{backends: map[string]string{"stabilityai/stable-diffusion-xl-base-1.0": backendDiffusers}}, + resolveConcurrency: 1, + } + + results, err := client.Search(t.Context(), SearchOptions{Query: "stable-diffusion", Limit: 10}) + if err != nil { + t.Fatalf("Search() error = %v", err) + } + + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Backend != backendDiffusers { + t.Fatalf("Backend = %q, want %q", results[0].Backend, backendDiffusers) + } +} + +func TestHuggingFaceSearchUsesUnknownWhenVerificationFails(t *testing.T) { + t.Parallel() + + server := newHuggingFaceSearchServer(t, []huggingFaceModel{ + { + ModelID: "foo/bar", + Likes: 1, + Downloads: 2, + Tags: []string{"transformers"}, + PipelineTag: "text-generation", + CreatedAt: "2026-01-26T11:32:37.220001Z", + }, + }) + defer server.Close() + + client := &HuggingFaceClient{ + httpClient: server.Client(), + baseURL: server.URL + "/api", + backendResolver: fakeBackendResolver{errs: map[string]error{"foo/bar": errors.New("lookup failed")}}, + resolveConcurrency: 1, + } + + results, err := client.Search(t.Context(), SearchOptions{Query: "foo", Limit: 10}) + if err != nil { + t.Fatalf("Search() error = %v", err) + } + + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Backend != backendUnknown { + t.Fatalf("Backend = %q, want %q", results[0].Backend, backendUnknown) + } +} + +func newHuggingFaceSearchServer(t *testing.T, response []huggingFaceModel) *httptest.Server { + t.Helper() + + mux := http.NewServeMux() + mux.HandleFunc("/api/models", func(w http.ResponseWriter, r *http.Request) { + if err := json.NewEncoder(w).Encode(response); err != nil { + 
t.Errorf("encode response: %v", err) + } + }) + + return httptest.NewServer(mux) +} diff --git a/cmd/cli/search/types.go b/cmd/cli/search/types.go index 32498524a..4c7e6fa06 100644 --- a/cmd/cli/search/types.go +++ b/cmd/cli/search/types.go @@ -17,7 +17,7 @@ type SearchResult struct { Source string // "Docker Hub" or "HuggingFace" Official bool // Whether this is an official model UpdatedAt string // Last update timestamp - Backend string // Backend type: "llama.cpp", "vllm", or "llama.cpp, vllm" if both + Backend string // Backend type: "llama.cpp", "vllm", "diffusers", "unknown", or a comma-separated combination } // SearchOptions configures the search behavior