diff --git a/crates/codegraph-core/src/read_queries.rs b/crates/codegraph-core/src/read_queries.rs index 30852454..405feacc 100644 --- a/crates/codegraph-core/src/read_queries.rs +++ b/crates/codegraph-core/src/read_queries.rs @@ -3,7 +3,7 @@ //! Uses a second `#[napi] impl NativeDatabase` block (Rust allows multiple impl blocks). //! All methods use `conn.prepare_cached()` for automatic statement caching. -use std::collections::{HashSet, VecDeque}; +use std::collections::{HashMap, HashSet, VecDeque}; use napi_derive::napi; use rusqlite::params; @@ -28,6 +28,15 @@ fn escape_like(s: &str) -> String { out } +/// Check if a file path looks like a test file (mirrors `isTestFile` in JS). +fn is_test_file(file: &str) -> bool { + file.contains(".test.") + || file.contains(".spec.") + || file.contains("__test__") + || file.contains("__tests__") + || file.contains(".stories.") +} + /// Build test-file exclusion clauses for a column. fn test_filter_clauses(column: &str) -> String { format!( @@ -1682,4 +1691,323 @@ impl NativeDatabase { Ok(results) } + + // ── Composite Queries ───────────────────────────────────────────────── + + /// Complete fnDeps query in a single native call. + /// + /// Finds matching nodes, collects callees/callers, and runs BFS transitive + /// caller traversal — all in Rust with `prepare_cached` statements. + /// Eliminates per-query NAPI boundary crossings that made the JS-orchestrated + /// version ~3x slower than direct better-sqlite3. 
+    #[napi]
+    pub fn fn_deps(
+        &self,
+        name: String,
+        depth: Option<i32>,
+        no_tests: Option<bool>,
+        file: Option<String>,
+        kind: Option<String>,
+    ) -> napi::Result<FnDepsResult> {
+        let conn = self.conn()?;
+        let depth = depth.unwrap_or(3).max(1) as usize;
+        let no_tests = no_tests.unwrap_or(false);
+        let lower_query = name.to_lowercase();
+
+        // ── Step 1: Find matching nodes with fan-in (relevance ranking) ───
+        let default_kinds = vec![
+            "function".to_string(),
+            "method".to_string(),
+            "class".to_string(),
+            "constant".to_string(),
+        ];
+        let kinds = if let Some(ref k) = kind {
+            vec![k.clone()]
+        } else {
+            default_kinds
+        };
+
+        let mut sql = String::from(
+            "SELECT n.id, n.name, n.kind, n.file, n.line, n.end_line, n.role, \
+             COALESCE(fi.cnt, 0) AS fan_in \
+             FROM nodes n \
+             LEFT JOIN (SELECT target_id, COUNT(*) AS cnt FROM edges WHERE kind = 'calls' GROUP BY target_id) fi \
+             ON fi.target_id = n.id \
+             WHERE n.name LIKE ?1",
+        );
+        let mut param_values: Vec<Box<dyn rusqlite::types::ToSql>> =
+            vec![Box::new(format!("%{name}%"))];
+        let mut idx = 2;
+
+        if !kinds.is_empty() {
+            let placeholders: Vec<String> =
+                kinds.iter().enumerate().map(|(i, _)| format!("?{}", idx + i)).collect();
+            sql.push_str(&format!(" AND n.kind IN ({})", placeholders.join(", ")));
+            for k in &kinds {
+                param_values.push(Box::new(k.clone()));
+            }
+            idx += kinds.len();
+        }
+        if let Some(ref f) = file {
+            sql.push_str(&format!(" AND n.file LIKE ?{idx} ESCAPE '\\'"));
+            param_values.push(Box::new(format!("%{}%", escape_like(f))));
+        }
+
+        let params_ref: Vec<&dyn rusqlite::types::ToSql> =
+            param_values.iter().map(|p| p.as_ref()).collect();
+
+        struct MatchedNode {
+            id: i32,
+            name: String,
+            kind: String,
+            file: String,
+            line: Option<i32>,
+            end_line: Option<i32>,
+            role: Option<String>,
+            fan_in: i32,
+        }
+
+        let mut matched: Vec<MatchedNode> = {
+            let mut stmt = conn.prepare_cached(&sql)
+                .map_err(|e| napi::Error::from_reason(format!("fn_deps find_nodes prepare: {e}")))?;
+            let rows = stmt.query_map(params_ref.as_slice(), |row| {
+                Ok(MatchedNode {
+                    id: row.get("id")?,
+                    name:
row.get("name")?,
+                    kind: row.get("kind")?,
+                    file: row.get("file")?,
+                    line: row.get("line")?,
+                    end_line: row.get("end_line")?,
+                    role: row.get("role")?,
+                    fan_in: row.get("fan_in")?,
+                })
+            }).map_err(|e| napi::Error::from_reason(format!("fn_deps find_nodes: {e}")))?;
+            rows.collect::<Result<Vec<_>, _>>()
+                .map_err(|e| napi::Error::from_reason(format!("fn_deps find_nodes collect: {e}")))?
+        };
+
+        // Filter test files
+        if no_tests {
+            matched.retain(|n| !is_test_file(&n.file));
+        }
+
+        // Relevance scoring (mirrors JS findMatchingNodes)
+        matched.sort_by(|a, b| {
+            let score = |node: &MatchedNode| -> f64 {
+                let lower_name = node.name.to_lowercase();
+                let bare_name = lower_name.rsplit('.').next().unwrap_or(&lower_name);
+                let match_score = if lower_name == lower_query || bare_name == lower_query {
+                    100.0
+                } else if lower_name.starts_with(&lower_query) || bare_name.starts_with(&lower_query) {
+                    60.0
+                } else if lower_name.contains(&format!(".{lower_query}")) || lower_name.contains(&format!("{lower_query}.")) {
+                    40.0
+                } else {
+                    10.0
+                };
+                let fan_in_bonus = ((node.fan_in as f64 + 1.0).log2() * 5.0).min(25.0);
+                match_score + fan_in_bonus
+            };
+            score(b).partial_cmp(&score(a)).unwrap_or(std::cmp::Ordering::Equal)
+        });
+
+        // ── Step 2: Build result for each matched node ────────────────────
+        let mut file_hash_cache: HashMap<String, Option<String>> = HashMap::new();
+
+        let mut results = Vec::with_capacity(matched.len());
+        for node in &matched {
+            // Callees
+            let callees: Vec<FnDepsNode> = {
+                let mut stmt = conn.prepare_cached(
+                    "SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line \
+                     FROM edges e JOIN nodes n ON e.target_id = n.id \
+                     WHERE e.source_id = ?1 AND e.kind = 'calls'"
+                ).map_err(|e| napi::Error::from_reason(format!("fn_deps callees prepare: {e}")))?;
+                let rows = stmt.query_map(params![node.id], |row| {
+                    Ok(FnDepsNode {
+                        name: row.get("name")?,
+                        kind: row.get("kind")?,
+                        file: row.get("file")?,
+                        line: row.get("line")?,
+                    })
+                }).map_err(|e| napi::Error::from_reason(format!("fn_deps
callees: {e}")))?;
+                let mut v: Vec<FnDepsNode> = rows.collect::<Result<Vec<_>, _>>()
+                    .map_err(|e| napi::Error::from_reason(format!("fn_deps callees collect: {e}")))?;
+                if no_tests {
+                    v.retain(|c| !is_test_file(&c.file));
+                }
+                v
+            };
+
+            // Callers (direct) — query includes `id` for BFS reuse
+            struct CallerWithId { id: i32, name: String, kind: String, file: String, line: Option<i32>, via_hierarchy: Option<String> }
+            let mut callers_with_id: Vec<CallerWithId> = {
+                let mut stmt = conn.prepare_cached(
+                    "SELECT n.id, n.name, n.kind, n.file, n.line \
+                     FROM edges e JOIN nodes n ON e.source_id = n.id \
+                     WHERE e.target_id = ?1 AND e.kind = 'calls'"
+                ).map_err(|e| napi::Error::from_reason(format!("fn_deps callers prepare: {e}")))?;
+                let rows = stmt.query_map(params![node.id], |row| {
+                    Ok(CallerWithId {
+                        id: row.get("id")?,
+                        name: row.get("name")?,
+                        kind: row.get("kind")?,
+                        file: row.get("file")?,
+                        line: row.get("line")?,
+                        via_hierarchy: None,
+                    })
+                }).map_err(|e| napi::Error::from_reason(format!("fn_deps callers: {e}")))?;
+                rows.collect::<Result<Vec<_>, _>>()
+                    .map_err(|e| napi::Error::from_reason(format!("fn_deps callers collect: {e}")))?
+            };
+
+            // Method hierarchy resolution
+            if node.kind == "method" && node.name.contains('.') {
+                if let Some(method_name) = node.name.split('.').last() {
+                    let pattern = format!("%.{method_name}");
+                    let related: Vec<(i32, String)> = {
+                        let mut stmt = conn.prepare_cached(
+                            "SELECT n.id, n.name FROM nodes n \
+                             LEFT JOIN (SELECT target_id, COUNT(*) AS cnt FROM edges WHERE kind = 'calls' GROUP BY target_id) fi \
+                             ON fi.target_id = n.id \
+                             WHERE n.name LIKE ?1 AND n.kind = 'method'"
+                        ).map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy prepare: {e}")))?;
+                        let rows = stmt.query_map(params![pattern], |row| {
+                            Ok((row.get::<_, i32>("id")?, row.get::<_, String>("name")?))
+                        }).map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy: {e}")))?;
+                        rows.collect::<Result<Vec<_>, _>>()
+                            .map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy collect: {e}")))?
+                    };
+                    for (rm_id, rm_name) in &related {
+                        if *rm_id == node.id { continue; }
+                        let mut stmt = conn.prepare_cached(
+                            "SELECT n.id, n.name, n.kind, n.file, n.line \
+                             FROM edges e JOIN nodes n ON e.source_id = n.id \
+                             WHERE e.target_id = ?1 AND e.kind = 'calls'"
+                        ).map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy callers prepare: {e}")))?;
+                        let rows = stmt.query_map(params![rm_id], |row| {
+                            Ok(CallerWithId {
+                                id: row.get("id")?,
+                                name: row.get("name")?,
+                                kind: row.get("kind")?,
+                                file: row.get("file")?,
+                                line: row.get("line")?,
+                                via_hierarchy: Some(rm_name.clone()),
+                            })
+                        }).map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy callers: {e}")))?;
+                        let extra: Vec<CallerWithId> = rows.collect::<Result<Vec<_>, _>>()
+                            .map_err(|e| napi::Error::from_reason(format!("fn_deps hierarchy callers collect: {e}")))?;
+                        callers_with_id.extend(extra);
+                    }
+                }
+            }
+            if no_tests {
+                callers_with_id.retain(|c| !is_test_file(&c.file));
+            }
+
+            // Convert to FnDepsCallerNode for output (strip id)
+            let callers: Vec<FnDepsCallerNode> = callers_with_id.iter().map(|c| FnDepsCallerNode {
+                name: c.name.clone(),
+                kind: c.kind.clone(),
+                file: c.file.clone(),
+                line: c.line,
+                via_hierarchy: c.via_hierarchy.clone(),
+            }).collect();
+
+            // BFS transitive callers — reuse callers_with_id as initial frontier
+            let transitive_callers = if depth > 1 {
+                let mut visited = HashSet::new();
+                visited.insert(node.id);
+                let initial_frontier: Vec<CallerWithId> = callers_with_id.iter().map(|c| CallerWithId {
+                    id: c.id, name: c.name.clone(), kind: c.kind.clone(), file: c.file.clone(), line: c.line, via_hierarchy: c.via_hierarchy.clone(),
+                }).collect();
+                let mut frontier: Vec<CallerWithId> = initial_frontier;
+                let mut groups: Vec<FnDepsTransitiveGroup> = Vec::new();
+
+                for d in 2..=depth {
+                    let unvisited: Vec<&CallerWithId> = frontier.iter()
+                        .filter(|f| !visited.contains(&f.id))
+                        .collect();
+                    for f in &unvisited {
+                        visited.insert(f.id);
+                    }
+                    if unvisited.is_empty() { break; }
+
+                    // Batch query: find all callers of the unvisited frontier
+                    let mut
next_frontier: Vec<CallerWithId> = Vec::new();
+                    let mut next_ids = HashSet::new();
+                    for f in &unvisited {
+                        let mut stmt = conn.prepare_cached(
+                            "SELECT n.id, n.name, n.kind, n.file, n.line \
+                             FROM edges e JOIN nodes n ON e.source_id = n.id \
+                             WHERE e.target_id = ?1 AND e.kind = 'calls'"
+                        ).map_err(|e| napi::Error::from_reason(format!("fn_deps bfs prepare: {e}")))?;
+                        let rows = stmt.query_map(params![f.id], |row| {
+                            Ok(CallerWithId {
+                                id: row.get("id")?,
+                                name: row.get("name")?,
+                                kind: row.get("kind")?,
+                                file: row.get("file")?,
+                                line: row.get("line")?,
+                                via_hierarchy: None,
+                            })
+                        }).map_err(|e| napi::Error::from_reason(format!("fn_deps bfs: {e}")))?;
+                        let upstream: Vec<CallerWithId> = rows.collect::<Result<Vec<_>, _>>()
+                            .map_err(|e| napi::Error::from_reason(format!("fn_deps bfs collect: {e}")))?;
+                        for u in upstream {
+                            if no_tests && is_test_file(&u.file) { continue; }
+                            if !visited.contains(&u.id) && !next_ids.contains(&u.id) {
+                                next_ids.insert(u.id);
+                                next_frontier.push(u);
+                            }
+                        }
+                    }
+
+                    if !next_frontier.is_empty() {
+                        groups.push(FnDepsTransitiveGroup {
+                            depth: d as i32,
+                            callers: next_frontier.iter().map(|n| FnDepsNode {
+                                name: n.name.clone(),
+                                kind: n.kind.clone(),
+                                file: n.file.clone(),
+                                line: n.line,
+                            }).collect(),
+                        });
+                    }
+                    frontier = next_frontier;
+                }
+                groups
+            } else {
+                Vec::new()
+            };
+
+            // File hash (cached)
+            let file_hash = if !file_hash_cache.contains_key(&node.file) {
+                let hash: Option<String> = conn.prepare_cached(
+                    "SELECT hash FROM file_hashes WHERE file = ?1"
+                ).ok().and_then(|mut stmt| {
+                    stmt.query_row(params![node.file], |row| row.get(0)).ok()
+                });
+                file_hash_cache.insert(node.file.clone(), hash.clone());
+                hash
+            } else {
+                file_hash_cache.get(&node.file).cloned().flatten()
+            };
+
+            results.push(FnDepsEntry {
+                name: node.name.clone(),
+                kind: node.kind.clone(),
+                file: node.file.clone(),
+                line: node.line,
+                end_line: node.end_line,
+                role: node.role.clone(),
+                file_hash,
+                callees,
+                callers,
+                transitive_callers,
+            });
+        }
+
+        Ok(FnDepsResult {
name, results })
+    }
 }
diff --git a/crates/codegraph-core/src/read_types.rs b/crates/codegraph-core/src/read_types.rs
index ffd966e2..73e4e319 100644
--- a/crates/codegraph-core/src/read_types.rs
+++ b/crates/codegraph-core/src/read_types.rs
@@ -300,3 +300,58 @@ pub struct FanMetric {
     pub fan_in: i32,
     pub fan_out: i32,
 }
+
+// ── Composite query return types (fnDeps) ─────────────────────────────
+
+/// A single caller/callee node in fnDeps results.
+#[napi(object)]
+#[derive(Debug, Clone)]
+pub struct FnDepsNode {
+    pub name: String,
+    pub kind: String,
+    pub file: String,
+    pub line: Option<i32>,
+}
+
+/// A caller node with optional hierarchy resolution info.
+#[napi(object)]
+#[derive(Debug, Clone)]
+pub struct FnDepsCallerNode {
+    pub name: String,
+    pub kind: String,
+    pub file: String,
+    pub line: Option<i32>,
+    pub via_hierarchy: Option<String>,
+}
+
+/// A group of transitive callers at a specific BFS depth.
+#[napi(object)]
+#[derive(Debug, Clone)]
+pub struct FnDepsTransitiveGroup {
+    pub depth: i32,
+    pub callers: Vec<FnDepsNode>,
+}
+
+/// A single symbol's dependency entry in the fnDeps result.
+#[napi(object)]
+#[derive(Debug, Clone)]
+pub struct FnDepsEntry {
+    pub name: String,
+    pub kind: String,
+    pub file: String,
+    pub line: Option<i32>,
+    pub end_line: Option<i32>,
+    pub role: Option<String>,
+    pub file_hash: Option<String>,
+    pub callees: Vec<FnDepsNode>,
+    pub callers: Vec<FnDepsCallerNode>,
+    pub transitive_callers: Vec<FnDepsTransitiveGroup>,
+}
+
+/// Complete fnDeps result returned from a single native call.
+#[napi(object)]
+#[derive(Debug, Clone)]
+pub struct FnDepsResult {
+    pub name: String,
+    pub results: Vec<FnDepsEntry>,
+}
diff --git a/src/db/repository/base.ts b/src/db/repository/base.ts
index 74e1a8c9..4c1546c2 100644
--- a/src/db/repository/base.ts
+++ b/src/db/repository/base.ts
@@ -223,4 +223,47 @@ export class Repository implements IRepository {
   hasCoChangesTable(): boolean {
     return false;
   }
+
+  // ── Composite queries ──────────────────────────────────────────────
+  /**
+   * Complete fnDeps query in a single call.
Returns null when not natively
+   * supported — callers should fall back to the JS-orchestrated path.
+   */
+  fnDeps(
+    _name: string,
+    _opts?: { depth?: number; noTests?: boolean; file?: string; kind?: string },
+  ): FnDepsResult | null {
+    return null;
+  }
+}
+
+// ── Composite query result types ────────────────────────────────────────
+
+export interface FnDepsNode {
+  name: string;
+  kind: string;
+  file: string;
+  line: number | null;
+}
+
+export interface FnDepsCallerNode extends FnDepsNode {
+  viaHierarchy?: string;
+}
+
+export interface FnDepsEntry {
+  name: string;
+  kind: string;
+  file: string;
+  line: number | null;
+  endLine: number | null;
+  role: string | null;
+  fileHash: string | null;
+  callees: FnDepsNode[];
+  callers: FnDepsCallerNode[];
+  transitiveCallers: Record<number, FnDepsNode[]>;
+}
+
+export interface FnDepsResult {
+  name: string;
+  results: FnDepsEntry[];
 }
diff --git a/src/db/repository/index.ts b/src/db/repository/index.ts
index 1f59c135..f7b4826f 100644
--- a/src/db/repository/index.ts
+++ b/src/db/repository/index.ts
@@ -1,5 +1,6 @@
 // Barrel re-export for repository/ modules.
 
+export type { FnDepsCallerNode, FnDepsEntry, FnDepsNode, FnDepsResult } from './base.js';
 export { Repository } from './base.js';
 export { purgeFileData, purgeFilesData } from './build-stmts.js';
 export { cachedStmt } from './cached-stmt.js';
diff --git a/src/db/repository/native-repository.ts b/src/db/repository/native-repository.ts
index 5182705c..90e1d006 100644
--- a/src/db/repository/native-repository.ts
+++ b/src/db/repository/native-repository.ts
@@ -45,7 +45,13 @@ import type {
   TriageNodeRow,
   TriageQueryOpts,
 } from '../../types.js';
-import { Repository } from './base.js';
+import {
+  type FnDepsCallerNode,
+  type FnDepsEntry,
+  type FnDepsNode,
+  type FnDepsResult,
+  Repository,
+} from './base.js';
 
 // ── Row converters (napi camelCase → Repository snake_case) ─────────────
 
@@ -461,4 +467,64 @@ export class NativeRepository extends Repository {
     }
     return false;
   }
+
+  // ── Composite queries ──────────────────────────────────────────────
+  fnDeps(
+    name: string,
+    opts?: { depth?: number; noTests?: boolean; file?: string; kind?: string },
+  ): FnDepsResult | null {
+    if (typeof this.#ndb.fnDeps !== 'function') return null;
+    const raw = this.#ndb.fnDeps(
+      name,
+      opts?.depth ?? undefined,
+      opts?.noTests ?? undefined,
+      opts?.file ?? undefined,
+      opts?.kind ?? undefined,
+    );
+    // Convert from native format (transitive_callers as array of groups)
+    // to JS format (transitiveCallers as Record)
+    return {
+      name: raw.name,
+      results: raw.results.map((entry: any): FnDepsEntry => {
+        const transitiveCallers: Record<number, FnDepsNode[]> = {};
+        for (const group of entry.transitiveCallers ?? []) {
+          transitiveCallers[group.depth] = (group.callers ?? []).map(
+            (c: any): FnDepsNode => ({
+              name: c.name,
+              kind: c.kind,
+              file: c.file,
+              line: c.line ?? null,
+            }),
+          );
+        }
+        return {
+          name: entry.name,
+          kind: entry.kind,
+          file: entry.file,
+          line: entry.line ?? null,
+          endLine: entry.endLine ?? entry.end_line ?? null,
+          role: entry.role ?? null,
+          fileHash: entry.fileHash ??
entry.file_hash ?? null, + callees: (entry.callees ?? []).map( + (c: any): FnDepsNode => ({ + name: c.name, + kind: c.kind, + file: c.file, + line: c.line ?? null, + }), + ), + callers: (entry.callers ?? []).map( + (c: any): FnDepsCallerNode => ({ + name: c.name, + kind: c.kind, + file: c.file, + line: c.line ?? null, + viaHierarchy: c.viaHierarchy ?? c.via_hierarchy ?? undefined, + }), + ), + transitiveCallers, + }; + }), + }; + } } diff --git a/src/domain/analysis/dependencies.ts b/src/domain/analysis/dependencies.ts index 84020a6e..730f5309 100644 --- a/src/domain/analysis/dependencies.ts +++ b/src/domain/analysis/dependencies.ts @@ -173,6 +173,19 @@ export function fnDepsData( } = {}, ) { return withRepo(customDbPath, (repo) => { + // Try native composite path — single NAPI call for the entire query. + const nativeResult = repo.fnDeps(name, { + depth: opts.depth, + noTests: opts.noTests, + file: opts.file, + kind: opts.kind, + }); + if (nativeResult) { + const base = { name: nativeResult.name, results: nativeResult.results }; + return paginateResult(base, 'results', { limit: opts.limit, offset: opts.offset }); + } + + // Fallback: JS-orchestrated path (used when native engine is unavailable) const depth = opts.depth || 3; const noTests = opts.noTests || false; const hc = new Map(); diff --git a/src/types.ts b/src/types.ts index 79aa04ee..d57ebfc2 100644 --- a/src/types.ts +++ b/src/types.ts @@ -334,6 +334,35 @@ export interface Repository { getFileHash(file: string): string | null; hasImplementsEdges(): boolean; hasCoChangesTable(): boolean; + + // ── Composite queries ────────────────────────────────────────────── + fnDeps( + name: string, + opts?: { depth?: number; noTests?: boolean; file?: string; kind?: string }, + ): { + name: string; + results: Array<{ + name: string; + kind: string; + file: string; + line: number | null; + endLine: number | null; + role: string | null; + fileHash: string | null; + callees: Array<{ name: string; kind: string; file: 
string; line: number | null }>; + callers: Array<{ + name: string; + kind: string; + file: string; + line: number | null; + viaHierarchy?: string; + }>; + transitiveCallers: Record< + number, + Array<{ name: string; kind: string; file: string; line: number | null }> + >; + }>; + } | null; } /** @@ -2055,6 +2084,46 @@ export interface NativeComplexityMetrics { halsteadVolume: number | null; } +// ── Native composite query types (fnDeps) ────────────────────────────── + +export interface NativeFnDepsNode { + name: string; + kind: string; + file: string; + line: number | null; +} + +export interface NativeFnDepsCallerNode { + name: string; + kind: string; + file: string; + line: number | null; + viaHierarchy: string | null; +} + +export interface NativeFnDepsTransitiveGroup { + depth: number; + callers: NativeFnDepsNode[]; +} + +export interface NativeFnDepsEntry { + name: string; + kind: string; + file: string; + line: number | null; + endLine: number | null; + role: string | null; + fileHash: string | null; + callees: NativeFnDepsNode[]; + callers: NativeFnDepsCallerNode[]; + transitiveCallers: NativeFnDepsTransitiveGroup[]; +} + +export interface NativeFnDepsResult { + name: string; + results: NativeFnDepsEntry[]; +} + /** Native rusqlite database wrapper instance (Phase 6.13 + 6.14 + 6.15). */ export interface NativeDatabase { // ── Lifecycle (6.13) ──────────────────────────────────────────────── @@ -2139,6 +2208,15 @@ export interface NativeDatabase { getComplexityForNode(nodeId: number): NativeComplexityMetrics | null; getFileHash(file: string): string | null; + // ── Composite queries ────────────────────────────────────────────── + fnDeps( + name: string, + depth: number | null | undefined, + noTests: boolean | null | undefined, + file: string | null | undefined, + kind: string | null | undefined, + ): NativeFnDepsResult; + // ── Build pipeline writes (6.15) ─────────────────────────────────── bulkInsertNodes( batches: Array<{